diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index ff7c66ef8..5cf16b51e 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -17,7 +17,7 @@ on:
 
 env:
   FRONTEND_BRANCH: master
-  GH_TOKEN: ${{ secrets.API_TOKEN_EXT }}
+  GITHUB_TOKEN: ${{ secrets.DOCS_GITHUB_TOKEN }}
 
 jobs:
   docs:
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b295118a0..451da2348 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,23 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog], and this project adheres to [Semantic Versioning].
 
+## [7.0.2] - 2023-10-10
+
+### Added
+
+- database: Added `dipdup_wipe` and `dipdup_approve` SQL functions to the schema.
+
+### Fixed
+
+- cli: Fixed `schema wipe` command for SQLite databases.
+- tezos.tzkt: Fixed regression in `get_transactions` method pagination.
+
+## [6.5.13] - 2023-10-10
+
+### Fixed
+
+- tzkt: Fixed regression in `get_transactions` method pagination.
+
 ## [7.0.1] - 2023-09-30
 
 ### Added
diff --git a/docs/5.advanced/1.reindexing.md b/docs/5.advanced/1.reindexing.md
index c7bdd75b6..741964f5f 100644
--- a/docs/5.advanced/1.reindexing.md
+++ b/docs/5.advanced/1.reindexing.md
@@ -7,13 +7,13 @@ description: "In some cases, DipDup can't proceed with indexing without a full w
 
 In some cases, DipDup can't proceed with indexing without a full wipe. Several reasons trigger reindexing:
 
-| reason            | description                                                                                                                |
-| ----------------- | -------------------------------------------------------------------------------------------------------------------------- |
-| `manual`          | Reindexing triggered manually from callback with `ctx.reindex`.                                                            |
-| `migration`       | Applied migration requires reindexing. Check release notes before switching between major DipDup versions to be prepared.  |
-| `rollback`        | Reorg message received from TzKT can not be processed.                                                                     |
-| `config_modified` | One of the index configs has been modified.                                                                                |
-| `schema_modified` | Database schema has been modified. Try to avoid manual schema modifications in favor of [sql](../5.advanced/6.sql.md).     |
+| reason            | description                                                                                                                     |
+| ----------------- | ------------------------------------------------------------------------------------------------------------------------------- |
+| `manual`          | Reindexing triggered manually from callback with `ctx.reindex`.                                                                 |
+| `migration`       | Applied migration requires reindexing. Check release notes before switching between major DipDup versions to be prepared.       |
+| `rollback`        | Reorg message received from datasource cannot be processed.                                                                     |
+| `config_modified` | One of the index configs has been modified.                                                                                     |
+| `schema_modified` | Database schema has been modified. Try to avoid manual schema modifications in favor of [SQL scripts](../5.advanced/3.sql.md).  |
 
 It is possible to configure desirable action on reindexing triggered by a specific reason.
diff --git a/docs/5.advanced/3.internal-tables.md b/docs/5.advanced/3.sql.md
similarity index 54%
rename from docs/5.advanced/3.internal-tables.md
rename to docs/5.advanced/3.sql.md
index 2b938bec6..1a9b1170c 100644
--- a/docs/5.advanced/3.internal-tables.md
+++ b/docs/5.advanced/3.sql.md
@@ -1,11 +1,13 @@
 ---
-title: "Internal tables"
-description: "This page describes the internal tables used by DipDup. They are created automatically and are not intended to be modified by the user. However, they can be useful for external monitoring and debugging."
+title: "Advanced SQL"
+description: "Put your *.sql scripts to dipdup_indexer/sql. You can run these scripts from any callback with ctx.execute_sql('name'). If name is a directory, each script it contains will be executed."
 ---
 
-# Internal tables
+# Advanced SQL
 
-This page describes the internal tables used by DipDup. They are created automatically and are not intended to be modified by the user. However, they can be useful for external monitoring and debugging.
+## Internal tables
+
+Several tables having the `dipdup_` prefix are created by DipDup automatically and are not intended to be modified by the user. However, they can be useful for external monitoring and debugging.
 
 | table                      | description                                                                                                                                 |
 |:-------------------------- |:------------------------------------------------------------------------------------------------------------------------------------------ |
@@ -15,8 +17,8 @@ This page describes the internal tables used by DipDup. They are created automat
 | `dipdup_contract`          | Info about contracts used by all indexes, including ones added in runtime.                                                                 |
 | `dipdup_model_update`      | Service table to store model diffs for database rollback. Configured by `advanced.rollback_depth`                                          |
 | `dipdup_meta`              | Arbitrary key-value storage for DipDup internal use. Survives reindexing. You can use it too, but don't touch keys with `dipdup_` prefix.  |
-| `dipdup_contract_metadata` | See Metadata interface page                                                                                                                |
-| `dipdup_token_metadata`    | See Metadata interface page                                                                                                                |
+| `dipdup_contract_metadata` | See [Metadata interface](/docs/advanced/metadata-interface)                                                                                |
+| `dipdup_token_metadata`    | See [Metadata interface](/docs/advanced/metadata-interface)                                                                                |
 
 See [`dipdup.models` module](https://github.com/dipdup-io/dipdup/blob/next/src/dipdup/models/__init__.py) for exact table definitions.
 
@@ -32,3 +34,28 @@ SELECT name, status FROM dipdup_index;
 -- Get last reindex time
 SELECT created_at FROM dipdup_schema WHERE name = 'public';
 ```
+
+## Scripts
+
+Put your `*.sql` scripts to `{{ project.package }}/sql`. You can run these scripts from any callback with `ctx.execute_sql('name')`. If `name` is a directory, each script it contains will be executed.
+
+Scripts are executed without being wrapped with SQL transactions. It's generally a good idea to avoid touching table data in scripts.
+
+By default, an empty `sql/` directory is generated for every hook in config during init. Remove the `ctx.execute_sql` call from the hook callback to avoid executing them.
+
+```python
+# Execute all scripts in sql/my_hook directory
+await ctx.execute_sql('my_hook')
+
+# Execute a single script
+await ctx.execute_sql('my_hook/my_script.sql')
+```
+
+## Managing schema
+
+When using PostgreSQL as the database engine, you can use the `dipdup_approve` and `dipdup_wipe` functions to manage schema state from the SQL console if needed:
+
+```sql
+SELECT dipdup_approve('public');
+SELECT dipdup_wipe('public');
+```
diff --git a/docs/5.advanced/6.sql.md b/docs/5.advanced/6.sql.md
deleted file mode 100644
index 8580e4b65..000000000
--- a/docs/5.advanced/6.sql.md
+++ /dev/null
@@ -1,22 +0,0 @@
----
-title: "SQL scripts"
-description: "Put your *.sql scripts to dipdup_indexer/sql. You can run these scripts from any callback with ctx.execute_sql('name'). If name is a directory, each script it contains will be executed."
----
-
-# SQL scripts
-
-Put your `*.sql` scripts to `{{ project.package }}/sql`. You can run these scripts from any callback with `ctx.execute_sql('name')`. If `name` is a directory, each script it contains will be executed.
-
-Scripts are executed without being wrapped with SQL transactions. It's generally a good idea to avoid touching table data in scripts.
-
-By default, an empty `sql/` directory is generated for every hook in config during init. Remove `ctx.execute_sql` call from hook callback to avoid executing them.
-
-## Usage
-
-```python
-# Execute all scripts in sql/my_hook directory
-await ctx.execute_sql('my_hook')
-
-# Execute a single script
-await ctx.execute_sql('my_hook/my_script.sql')
-```
diff --git a/pdm.lock b/pdm.lock
index 187473c1a..a2c96f1ac 100644
--- a/pdm.lock
+++ b/pdm.lock
@@ -10,7 +10,7 @@ content_hash = "sha256:efee5be5a71d12cb011518dfb65eeafb79905f8b94869c0867032ec6a
 
 [[package]]
 name = "aiohttp"
-version = "3.8.5"
+version = "3.8.6"
 requires_python = ">=3.6"
 summary = "Async http client/server framework (asyncio)"
 dependencies = [
@@ -23,22 +23,22 @@ dependencies = [
     "yarl<2.0,>=1.0",
 ]
 files = [
-    {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae871a964e1987a943d83d6709d20ec6103ca1eaf52f7e0d36ee1b5bebb8b9b9"},
-    {file = "aiohttp-3.8.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:461908b2578955045efde733719d62f2b649c404189a09a632d245b445c9c975"},
-    {file = "aiohttp-3.8.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:72a860c215e26192379f57cae5ab12b168b75db8271f111019509a1196dfc780"},
-    {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc14be025665dba6202b6a71cfcdb53210cc498e50068bc088076624471f8bb9"},
-    {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8af740fc2711ad85f1a5c034a435782fbd5b5f8314c9a3ef071424a8158d7f6b"},
-    {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:841cd8233cbd2111a0ef0a522ce016357c5e3aff8a8ce92bcfa14cef890d698f"},
-    {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ed1c46fb119f1b59304b5ec89f834f07124cd23ae5b74288e364477641060ff"},
-    {file = "aiohttp-3.8.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84f8ae3e09a34f35c18fa57f015cc394bd1389bce02503fb30c394d04ee6b938"},
-    {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62360cb771707cb70a6fd114b9871d20d7dd2163a0feafe43fd115cfe4fe845e"},
-    {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:23fb25a9f0a1ca1f24c0a371523546366bb642397c94ab45ad3aedf2941cec6a"},
-    {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b0ba0d15164eae3d878260d4c4df859bbdc6466e9e6689c344a13334f988bb53"},
-    {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5d20003b635fc6ae3f96d7260281dfaf1894fc3aa24d1888a9b2628e97c241e5"},
-    {file = "aiohttp-3.8.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0175d745d9e85c40dcc51c8f88c74bfbaef9e7afeeeb9d03c37977270303064c"},
-    {file = "aiohttp-3.8.5-cp311-cp311-win32.whl", hash = "sha256:2e1b1e51b0774408f091d268648e3d57f7260c1682e7d3a63cb00d22d71bb945"},
-    {file = "aiohttp-3.8.5-cp311-cp311-win_amd64.whl", hash = "sha256:043d2299f6dfdc92f0ac5e995dfc56668e1587cea7f9aa9d8a78a1b6554e5755"},
-    {file = "aiohttp-3.8.5.tar.gz", hash = "sha256:b9552ec52cc147dbf1944ac7ac98af7602e51ea2dcd076ed194ca3c0d1c7d0bc"},
+    {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e2f9cc8e5328f829f6e1fb74a0a3a939b14e67e80832975e01929e320386b34"},
+    {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e6a00ffcc173e765e200ceefb06399ba09c06db97f401f920513a10c803604ca"},
+    {file = "aiohttp-3.8.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:41bdc2ba359032e36c0e9de5a3bd00d6fb7ea558a6ce6b70acedf0da86458321"},
+    {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14cd52ccf40006c7a6cd34a0f8663734e5363fd981807173faf3a017e202fec9"},
+    {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d5b785c792802e7b275c420d84f3397668e9d49ab1cb52bd916b3b3ffcf09ad"},
+    {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1bed815f3dc3d915c5c1e556c397c8667826fbc1b935d95b0ad680787896a358"},
+    {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96603a562b546632441926cd1293cfcb5b69f0b4159e6077f7c7dbdfb686af4d"},
+    {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d76e8b13161a202d14c9584590c4df4d068c9567c99506497bdd67eaedf36403"},
+    {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e3f1e3f1a1751bb62b4a1b7f4e435afcdade6c17a4fd9b9d43607cebd242924a"},
+    {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76b36b3124f0223903609944a3c8bf28a599b2cc0ce0be60b45211c8e9be97f8"},
+    {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a2ece4af1f3c967a4390c284797ab595a9f1bc1130ef8b01828915a05a6ae684"},
+    {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:16d330b3b9db87c3883e565340d292638a878236418b23cc8b9b11a054aaa887"},
+    {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42c89579f82e49db436b69c938ab3e1559e5a4409eb8639eb4143989bc390f2f"},
+    {file = "aiohttp-3.8.6-cp311-cp311-win32.whl", hash = "sha256:efd2fcf7e7b9d7ab16e6b7d54205beded0a9c8566cb30f09c1abe42b4e22bdcb"},
+    {file = "aiohttp-3.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b2ab182fc28e7a81f6c70bfbd829045d9480063f5ab06f6e601a3eddbbd49a0"},
+    {file = "aiohttp-3.8.6.tar.gz", hash = "sha256:b0cf2a4501bff9330a8a5248b4ce951851e415bdcce9dc158e76cfd55e15085c"},
 ]
 
 [[package]]
@@ -406,7 +406,7 @@ files = [
 
 [[package]]
 name = "datamodel-code-generator"
-version = "0.22.0"
+version = "0.22.1"
 requires_python = ">=3.7,<4.0"
 summary = "Datamodel Code Generator"
 dependencies = [
@@ -420,12 +420,12 @@ dependencies = [
     "openapi-spec-validator<=0.5.7,>=0.2.8",
    "packaging",
     "prance>=0.18.2",
-    "pydantic[email]<3.0,>=1.10.0; python_version >= \"3.11\" and python_version < \"4.0\"",
+    "pydantic[email]!=2.4.0,<3.0,>=1.10.0; python_version >= \"3.11\" and python_version < \"4.0\"",
     "toml<1.0.0,>=0.10.0",
 ]
 files = [
-    {file = "datamodel_code_generator-0.22.0-py3-none-any.whl", hash = "sha256:5cf8fc4fb6fe7aa750595a558cd4fcd43e36e862f40b0fa4cc123b4548b16a1e"},
-    {file = "datamodel_code_generator-0.22.0.tar.gz", hash = "sha256:73ebcefa498e39d0f210923856cb4a498bacc3b7bdea140cca7324e25f5c581b"},
+    {file = "datamodel_code_generator-0.22.1-py3-none-any.whl", hash = "sha256:ac1fbc4fa778c2a43f740740fd352ca4300f705044e112a0023af8d04f0b61af"},
+    {file = "datamodel_code_generator-0.22.1.tar.gz", hash = "sha256:48c8ce0b38b575bcc573237bb3faab696b072aa131b3f008c848d2c3b24a4417"},
 ]
 
 [[package]]
@@ -876,7 +876,7 @@ files = [
 
 [[package]]
 name = "mypy"
-version = "1.5.1"
+version = "1.6.0"
 requires_python = ">=3.8"
 summary = "Optional static typing for Python"
 dependencies = [
@@ -884,13 +884,13 @@ dependencies = [
     "typing-extensions>=4.1.0",
 ]
 files = [
-    {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"},
-    {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"},
-    {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"},
-    {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"},
-    {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"},
-    {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"},
-    {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"},
+    {file = "mypy-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c8835a07b8442da900db47ccfda76c92c69c3a575872a5b764332c4bacb5a0a"},
+    {file = "mypy-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:24f3de8b9e7021cd794ad9dfbf2e9fe3f069ff5e28cb57af6f873ffec1cb0425"},
+    {file = "mypy-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:856bad61ebc7d21dbc019b719e98303dc6256cec6dcc9ebb0b214b81d6901bd8"},
+    {file = "mypy-1.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89513ddfda06b5c8ebd64f026d20a61ef264e89125dc82633f3c34eeb50e7d60"},
+    {file = "mypy-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:9f8464ed410ada641c29f5de3e6716cbdd4f460b31cf755b2af52f2d5ea79ead"},
+    {file = "mypy-1.6.0-py3-none-any.whl", hash = "sha256:9e1589ca150a51d9d00bb839bfeca2f7a04f32cd62fad87a847bc0818e15d7dc"},
+    {file = "mypy-1.6.0.tar.gz", hash = "sha256:4f3d27537abde1be6d5f2c96c29a454da333a2a271ae7d5bc7110e6d4b7beb3f"},
 ]
 
 [[package]]
@@ -954,21 +954,21 @@ files = [
 
 [[package]]
 name = "orjson"
-version = "3.9.7"
-requires_python = ">=3.7"
+version = "3.9.8"
+requires_python = ">=3.8"
 summary = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
 files = [
-    {file = "orjson-3.9.7-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:1f8b47650f90e298b78ecf4df003f66f54acdba6a0f763cc4df1eab048fe3738"},
-    {file = "orjson-3.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f738fee63eb263530efd4d2e9c76316c1f47b3bbf38c1bf45ae9625feed0395e"},
-    {file = "orjson-3.9.7-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:38e34c3a21ed41a7dbd5349e24c3725be5416641fdeedf8f56fcbab6d981c900"},
-    {file = "orjson-3.9.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21a3344163be3b2c7e22cef14fa5abe957a892b2ea0525ee86ad8186921b6cf0"},
-    {file = "orjson-3.9.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23be6b22aab83f440b62a6f5975bcabeecb672bc627face6a83bc7aeb495dc7e"},
-    {file = "orjson-3.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5205ec0dfab1887dd383597012199f5175035e782cdb013c542187d280ca443"},
-    {file = "orjson-3.9.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8769806ea0b45d7bf75cad253fba9ac6700b7050ebb19337ff6b4e9060f963fa"},
-    {file = "orjson-3.9.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f9e01239abea2f52a429fe9d95c96df95f078f0172489d691b4a848ace54a476"},
-    {file = "orjson-3.9.7-cp311-none-win32.whl", hash = "sha256:8bdb6c911dae5fbf110fe4f5cba578437526334df381b3554b6ab7f626e5eeca"},
-    {file = "orjson-3.9.7-cp311-none-win_amd64.whl", hash = "sha256:9d62c583b5110e6a5cf5169ab616aa4ec71f2c0c30f833306f9e378cf51b6c86"},
-    {file = "orjson-3.9.7.tar.gz", hash = "sha256:85e39198f78e2f7e054d296395f6c96f5e02892337746ef5b6a1bf3ed5910142"},
+    {file = "orjson-3.9.8-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:8a1c92f467f5fd0f8fb79273006b563364b1e45667b3760423498348dc2e22fa"},
+    {file = "orjson-3.9.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:742d4d16d66579ffff4b2048a8de4a0b03d731847233e92c4edd418a9c582d0f"},
+    {file = "orjson-3.9.8-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6d1aab08b373232f568ea9ae048f9f77e09f389068afee6dd44bb6140e2c3ea3"},
+    {file = "orjson-3.9.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:68ed63273ec4ecdd7865e9d984d65a749c0d780882cf9dde6ab2bc6323f6471a"},
+    {file = "orjson-3.9.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d23edcb32383f3d86b2f4914f9825ce2d67625abd34be6e5ed1f59ec30127b7a"},
+    {file = "orjson-3.9.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9bcd3a48b260d3dfe68b8ce93d11f99a70bd4c908efe22d195a1b1dcfb15ac2"},
+    {file = "orjson-3.9.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9ce982f3c1df83f7dc74f3b2690605470ff4790d12558e44359f01e822c5cb08"},
+    {file = "orjson-3.9.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4433dd903d5b022a64e9dd1dca94f08ab04d5d928a0ecd33dd46110468960879"},
+    {file = "orjson-3.9.8-cp311-none-win32.whl", hash = "sha256:a119c73520192c2882d0549151b9cdd65e0bb5396bedf8951ba5f70d6a873879"},
+    {file = "orjson-3.9.8-cp311-none-win_amd64.whl", hash = "sha256:764306f6370e6c76cbbf3139dd9b05be9c4481ee0b15966bd1907827a5777216"},
+    {file = "orjson-3.9.8.tar.gz", hash = "sha256:ed1adc6db9841974170a5195b827ee4e392b1e8ca385b19fcdc3248489844059"},
 ]
 
 [[package]]
diff --git a/pyproject.toml b/pyproject.toml
index 0029cf2c7..256225930 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,7 @@
 [project]
 name = "dipdup"
 description = "Modular framework for creating selective indexers and featureful backends for dapps"
-version = "7.0.1"
+version = "7.0.2"
 license = { text = "MIT" }
 authors = [
     { name = "Lev Gorodetskii", email = "dipdup@drsr.io" },
diff --git a/requirements.dev.txt b/requirements.dev.txt
index ad00f7d23..642cc6d9a 100644
--- a/requirements.dev.txt
+++ b/requirements.dev.txt
@@ -1,7 +1,7 @@
 # This file is @generated by PDM.
 # Please do not edit it manually.
 
-aiohttp==3.8.5
+aiohttp==3.8.6
 aiolimiter==1.1.0
 aiosignal==1.3.1
 aiosqlite==0.17.0
@@ -23,7 +23,7 @@ charset-normalizer==3.2.0
 click==8.1.7
 coverage==7.3.0
 cytoolz==0.12.2
-datamodel-code-generator==0.22.0
+datamodel-code-generator==0.22.1
 dnspython==2.4.2
 docker==6.1.3
 docutils==0.20.1
@@ -54,12 +54,12 @@ lru-dict==1.2.0
 MarkupSafe==2.1.3
 msgpack==1.0.5
 multidict==6.0.4
-mypy==1.5.1
+mypy==1.6.0
 mypy-extensions==1.0.0
 numpy==1.25.2
 openapi-schema-validator==0.4.4
 openapi-spec-validator==0.5.7
-orjson==3.9.7
+orjson==3.9.8
 packaging==23.1
 parsimonious==0.9.0
 pathable==0.4.3
diff --git a/requirements.txt b/requirements.txt
index 140e0127d..7e9cbb23d 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,7 @@
 # This file is @generated by PDM.
 # Please do not edit it manually.
 
-aiohttp==3.8.5
+aiohttp==3.8.6
 aiolimiter==1.1.0
 aiosignal==1.3.1
 aiosqlite==0.17.0
@@ -20,7 +20,7 @@ chardet==5.2.0
 charset-normalizer==3.2.0
 click==8.1.7
 cytoolz==0.12.2
-datamodel-code-generator==0.22.0
+datamodel-code-generator==0.22.1
 dnspython==2.4.2
 email-validator==2.0.0.post2
 eth-abi==4.2.1
@@ -50,7 +50,7 @@ mypy-extensions==1.0.0
 numpy==1.25.2
 openapi-schema-validator==0.4.4
 openapi-spec-validator==0.5.7
-orjson==3.9.7
+orjson==3.9.8
 packaging==23.1
 parsimonious==0.9.0
 pathable==0.4.3
diff --git a/scripts/dump_schema.py b/scripts/dump_schema.py
index b85b43d2b..55cc32f69 100644
--- a/scripts/dump_schema.py
+++ b/scripts/dump_schema.py
@@ -2,7 +2,7 @@
 from pathlib import Path
 
 import orjson
-from dc_schema import get_schema  # type: ignore[import]
+from dc_schema import get_schema  # type: ignore[import-not-found]
 
 from dipdup.config import DipDupConfig
 
diff --git a/src/demo_uniswap/models/repo.py b/src/demo_uniswap/models/repo.py
index b0d64779b..736fe328d 100644
--- a/src/demo_uniswap/models/repo.py
+++ b/src/demo_uniswap/models/repo.py
@@ -2,7 +2,7 @@
 from typing import Any
 from typing import cast
 
-from lru import LRU  # type: ignore[import]
+from lru import LRU  # type: ignore[import-not-found]
 
 import demo_uniswap.models as models
 from dipdup.config.evm import EvmContractConfig
@@ -32,7 +32,7 @@ def save_pending_position(self, idx: str, position: dict[str, Any]) -> None:
         self._pending_positions[idx] = position
 
     def get_pending_position(self, idx: str) -> dict[str, Any] | None:
-        return self._pending_positions.get(idx, None)
+        return self._pending_positions.get(idx, None)  # type: ignore[no-any-return]
 
 
 async def get_ctx_factory(ctx: HandlerContext) -> models.Factory:
diff --git a/src/dipdup/cli.py b/src/dipdup/cli.py
index a2a32ce8a..9d49d9657 100644
--- a/src/dipdup/cli.py
+++ b/src/dipdup/cli.py
@@ -507,7 +507,9 @@ async def schema_wipe(ctx: click.Context, immune: bool, force: bool) -> None:
         conn = get_connection()
         await wipe_schema(
             conn=conn,
-            schema_name=config.database.schema_name,
+            schema_name=config.database.path
+            if isinstance(config.database, SqliteDatabaseConfig)
+            else config.database.schema_name,
             immune_tables=immune_tables,
         )
 
diff --git a/src/dipdup/database.py b/src/dipdup/database.py
index 76f1c6c10..cff3cf138 100644
--- a/src/dipdup/database.py
+++ b/src/dipdup/database.py
@@ -14,8 +14,8 @@
 from typing import Any
 from typing import cast
 
-import asyncpg.exceptions  # type: ignore[import]
-import sqlparse  # type: ignore[import]
+import asyncpg.exceptions  # type: ignore[import-untyped]
+import sqlparse  # type: ignore[import-untyped]
 from tortoise import Tortoise
 from tortoise.backends.asyncpg.client import AsyncpgDBClient
 from tortoise.backends.base.client import BaseDBAsyncClient
@@ -194,44 +194,54 @@ async def generate_schema(
     conn: SupportedClient,
     name: str,
 ) -> None:
-    if isinstance(conn, AsyncpgClient):
-        await pg_create_schema(conn, name)
+    if isinstance(conn, SqliteClient):
+        await Tortoise.generate_schemas()
+    elif isinstance(conn, AsyncpgClient):
+        await _pg_create_schema(conn, name)
+        await Tortoise.generate_schemas()
+        await _pg_create_functions(conn)
+        await _pg_create_views(conn)
+    else:
+        raise NotImplementedError
 
-    await Tortoise.generate_schemas()
-
-    if isinstance(conn, AsyncpgClient):
-        # NOTE: Create a view for monitoring head status
-        sql_path = Path(__file__).parent / 'sql' / 'dipdup_head_status.sql'
-        # TODO: Configurable interval
-        await execute_sql(conn, sql_path, HEAD_STATUS_TIMEOUT)
 
+async def _pg_create_functions(conn: AsyncpgClient) -> None:
+    for fn in (
+        'dipdup_approve.sql',
+        'dipdup_wipe.sql',
+    ):
+        sql_path = Path(__file__).parent / 'sql' / fn
+        await execute_sql(conn, sql_path)
 
-async def _wipe_schema_postgres(
+
+async def _pg_create_views(conn: AsyncpgClient) -> None:
+    sql_path = Path(__file__).parent / 'sql' / 'dipdup_head_status.sql'
+    # TODO: Configurable interval
+    await execute_sql(conn, sql_path, HEAD_STATUS_TIMEOUT)
+
+
+async def _pg_wipe_schema(
     conn: AsyncpgClient,
     schema_name: str,
     immune_tables: set[str],
 ) -> None:
     immune_schema_name = f'{schema_name}_immune'
-    # NOTE: Create a truncate_schema function to trigger cascade deletion
-    sql_path = Path(__file__).parent / 'sql' / 'truncate_schema.sql'
-    await execute_sql(conn, sql_path, schema_name, immune_schema_name)
-
     # NOTE: Move immune tables to a separate schema - it's free!
     if immune_tables:
-        await pg_create_schema(conn, immune_schema_name)
+        await _pg_create_schema(conn, immune_schema_name)
         for table in immune_tables:
-            await pg_move_table(conn, table, schema_name, immune_schema_name)
+            await _pg_move_table(conn, table, schema_name, immune_schema_name)
 
-    await conn.execute_script(f"SELECT truncate_schema('{schema_name}')")
+    await conn.execute_script(f"SELECT dipdup_wipe('{schema_name}')")
 
     if immune_tables:
         for table in immune_tables:
-            await pg_move_table(conn, table, immune_schema_name, schema_name)
-        await pg_drop_schema(conn, immune_schema_name)
+            await _pg_move_table(conn, table, immune_schema_name, schema_name)
+        await _pg_drop_schema(conn, immune_schema_name)
 
 
-async def _wipe_schema_sqlite(
+async def _sqlite_wipe_schema(
     conn: SqliteClient,
     path: str,
     immune_tables: set[str],
@@ -245,10 +255,10 @@ async def _wipe_schema_sqlite(
     await conn.execute_script(f'ATTACH DATABASE "{immune_path}" AS {namespace}')
 
     # NOTE: Copy immune tables to the new database.
-    master_query = 'SELECT name, type FROM sqlite_master'
+    master_query = 'SELECT name FROM sqlite_master WHERE type = "table"'
     result = await conn.execute_query(master_query)
-    for name, type_ in result[1]:
-        if type_ != 'table' or name not in immune_tables:
+    for name in result[1]:
+        if name not in immune_tables:  # type: ignore[comparison-overlap]
             continue
 
         expr = f'CREATE TABLE {namespace}.{name} AS SELECT * FROM {name}'
@@ -271,23 +281,23 @@ async def wipe_schema(
     """Truncate schema preserving immune tables. Executes in a transaction"""
     async with conn._in_transaction() as conn:
         if isinstance(conn, SqliteClient):
-            await _wipe_schema_sqlite(conn, schema_name, immune_tables)
+            await _sqlite_wipe_schema(conn, schema_name, immune_tables)
         elif isinstance(conn, AsyncpgClient):
-            await _wipe_schema_postgres(conn, schema_name, immune_tables)
+            await _pg_wipe_schema(conn, schema_name, immune_tables)
         else:
             raise NotImplementedError
 
 
-async def pg_create_schema(conn: AsyncpgClient, name: str) -> None:
+async def _pg_create_schema(conn: AsyncpgClient, name: str) -> None:
     """Create PostgreSQL schema if not exists"""
     await conn.execute_script(f'CREATE SCHEMA IF NOT EXISTS {name}')
 
 
-async def pg_drop_schema(conn: AsyncpgClient, name: str) -> None:
+async def _pg_drop_schema(conn: AsyncpgClient, name: str) -> None:
     await conn.execute_script(f'DROP SCHEMA IF EXISTS {name}')
 
 
-async def pg_move_table(conn: AsyncpgClient, name: str, schema: str, new_schema: str) -> None:
+async def _pg_move_table(conn: AsyncpgClient, name: str, schema: str, new_schema: str) -> None:
     """Move table from one schema to another"""
     await conn.execute_script(f'ALTER TABLE {schema}.{name} SET SCHEMA {new_schema}')
diff --git a/src/dipdup/datasources/evm_subsquid.py b/src/dipdup/datasources/evm_subsquid.py
index 1a866c558..e51ed26c1 100644
--- a/src/dipdup/datasources/evm_subsquid.py
+++ b/src/dipdup/datasources/evm_subsquid.py
@@ -7,7 +7,7 @@
 from io import BytesIO
 from typing import Any
 
-import pyarrow.ipc  # type: ignore[import]
+import pyarrow.ipc  # type: ignore[import-untyped]
 
 from dipdup.config import HttpConfig
 from dipdup.config.evm_subsquid import SubsquidDatasourceConfig
diff --git a/src/dipdup/datasources/tezos_tzkt.py b/src/dipdup/datasources/tezos_tzkt.py
index ee4a891da..00fd4d479 100644
--- a/src/dipdup/datasources/tezos_tzkt.py
+++ b/src/dipdup/datasources/tezos_tzkt.py
@@ -713,14 +713,18 @@ async def get_transactions(
         params = self._get_request_params(
             first_level=first_level,
             last_level=last_level,
-            offset=offset,
+            # NOTE: This is intentional; offset is applied as `id.gt` below
+            offset=None,
             limit=limit,
             select=TRANSACTION_OPERATION_FIELDS,
             values=True,
-            cursor=True,
             sort='level',
             status='applied',
         )
+        # TODO: TzKT doesn't support sort combined with cursor pagination currently
+        if offset is not None:
+            params['id.gt'] = offset
+
         if addresses and not code_hashes:
             params[f'{field}.in'] = ','.join(addresses)
         elif code_hashes and not addresses:
diff --git a/src/dipdup/indexes/tezos_tzkt_operations/matcher.py b/src/dipdup/indexes/tezos_tzkt_operations/matcher.py
index 855dd5764..92cf2231a 100644
--- a/src/dipdup/indexes/tezos_tzkt_operations/matcher.py
+++ b/src/dipdup/indexes/tezos_tzkt_operations/matcher.py
@@ -208,6 +208,8 @@ def match_operation_subgroup(
             transaction = handler[2][-1]
             if isinstance(transaction, TzktOperationData):
                 id_list.append(transaction.id)
+            elif isinstance(transaction, TzktOrigination):
+                id_list.append(transaction.data.id)
             elif isinstance(transaction, TzktTransaction):
                 id_list.append(transaction.data.id)
             else:
diff --git a/src/dipdup/indexes/tezos_tzkt_operations/parser.py b/src/dipdup/indexes/tezos_tzkt_operations/parser.py
index da8527f3d..85fac26da 100644
--- a/src/dipdup/indexes/tezos_tzkt_operations/parser.py
+++ b/src/dipdup/indexes/tezos_tzkt_operations/parser.py
@@ -26,7 +26,7 @@
 T = TypeVar('T', Hashable, type[BaseModel])
 
 
-def extract_root_outer_type(storage_type: type[BaseModel]) -> T:
+def extract_root_outer_type(storage_type: type[BaseModel]) -> T:  # type: ignore[type-var]
     """Extract Pydantic __root__ type"""
     root_field = storage_type.__fields__['__root__']
     if root_field.allow_none:
diff --git a/src/dipdup/performance.py b/src/dipdup/performance.py
index 9d6296db8..05ab7041a 100644
--- a/src/dipdup/performance.py
+++ b/src/dipdup/performance.py
@@ -27,7 +27,7 @@
 from typing import cast
 
 from async_lru import alru_cache
-from lru import LRU  # type: ignore[import]
+from lru import LRU  # type: ignore[import-not-found]
 
 from dipdup.exceptions import FrameworkException
 
@@ -42,7 +42,7 @@
 @asynccontextmanager
 async def with_pprofile(name: str) -> AsyncIterator[None]:
     try:
-        import pprofile  # type: ignore[import]
+        import pprofile  # type: ignore[import-untyped]
 
         _logger.warning('Full profiling is enabled, this will affect performance')
     except ImportError:
diff --git a/src/dipdup/project.py b/src/dipdup/project.py
index 9bfc2bd8b..11c96abdb 100644
--- a/src/dipdup/project.py
+++ b/src/dipdup/project.py
@@ -97,7 +97,7 @@ def prompt_anyof(
     default: int,
 ) -> tuple[int, str]:
     """Ask user to choose one of options; returns index and value"""
-    import survey  # type: ignore[import]
+    import survey  # type: ignore[import-untyped]
 
     table = tabulate(
         zip(options, comments, strict=True),
diff --git a/src/dipdup/projects/demo_uniswap/models/repo.py.j2 b/src/dipdup/projects/demo_uniswap/models/repo.py.j2
index d739e3cde..e2e774ce5 100644
--- a/src/dipdup/projects/demo_uniswap/models/repo.py.j2
+++ b/src/dipdup/projects/demo_uniswap/models/repo.py.j2
@@ -2,7 +2,7 @@ from decimal import Decimal
 from typing import Any
 from typing import cast
 
-from lru import LRU  # type: ignore[import]
+from lru import LRU  # type: ignore[import-untyped]
 
 import {{ project.package }}.models as models
 from dipdup.config.evm import EvmContractConfig
diff --git a/src/dipdup/scheduler.py b/src/dipdup/scheduler.py
index 3778b3429..b1555cd07 100644
--- a/src/dipdup/scheduler.py
+++ b/src/dipdup/scheduler.py
@@ -4,13 +4,13 @@
 from functools import partial
 from typing import Any
 
-from apscheduler.events import EVENT_JOB_ERROR  # type: ignore[import]
+from apscheduler.events import EVENT_JOB_ERROR  # type: ignore[import-untyped]
 from apscheduler.events import EVENT_JOB_EXECUTED
 from apscheduler.events import JobEvent
-from apscheduler.job import Job  # type: ignore[import]
-from apscheduler.schedulers.asyncio import AsyncIOScheduler  # type: ignore[import]
-from apscheduler.triggers.cron import CronTrigger  # type: ignore[import]
-from apscheduler.triggers.interval import IntervalTrigger  # type: ignore[import]
+from apscheduler.job import Job  # type: ignore[import-untyped]
+from apscheduler.schedulers.asyncio import AsyncIOScheduler  # type: ignore[import-untyped]
+from apscheduler.triggers.cron import CronTrigger  # type: ignore[import-untyped]
+from apscheduler.triggers.interval import IntervalTrigger  # type: ignore[import-untyped]
 
 from dipdup.config import JobConfig
 from dipdup.context import DipDupContext
diff --git a/src/dipdup/sql/dipdup_approve.sql b/src/dipdup/sql/dipdup_approve.sql
new file mode 100644
index 000000000..5691779e9
--- /dev/null
+++ b/src/dipdup/sql/dipdup_approve.sql
@@ -0,0 +1,7 @@
+CREATE OR REPLACE FUNCTION dipdup_approve(schema_name VARCHAR) RETURNS void AS $$
+BEGIN
+    UPDATE dipdup_index SET config_hash = null;
+    UPDATE dipdup_schema SET reindex = null, hash = null;
+    RETURN;
+END;
+$$ LANGUAGE plpgsql;
diff --git a/src/dipdup/sql/truncate_schema.sql b/src/dipdup/sql/dipdup_wipe.sql
similarity index 86%
rename from src/dipdup/sql/truncate_schema.sql
rename to src/dipdup/sql/dipdup_wipe.sql
index 01f19f7ac..965fc951a 100644
--- a/src/dipdup/sql/truncate_schema.sql
+++ b/src/dipdup/sql/dipdup_wipe.sql
@@ -1,5 +1,5 @@
 -- source of inspiration: https://stackoverflow.com/a/11462481
-CREATE OR REPLACE FUNCTION truncate_schema(schema_name VARCHAR) RETURNS void AS $$
+CREATE OR REPLACE FUNCTION dipdup_wipe(schema_name VARCHAR) RETURNS void AS $$
 DECLARE
     rec RECORD;
 BEGIN
@@ -63,14 +63,6 @@ BEGIN
             WHEN others THEN END;
     END LOOP;
 
-    -- BEGIN
-    --     CREATE EXTENSION IF NOT EXISTS pgcrypto;
-    --     CREATE EXTENSION IF NOT EXISTS timescaledb;
-    -- EXCEPTION
-    --     WHEN OTHERS THEN
-    --         NULL;
-    -- END;
-
     RETURN;
 END;
 $$ LANGUAGE plpgsql;
diff --git a/tests/profile_abi_decoding.py b/tests/profile_abi_decoding.py
index 483fd16ed..ace6b6b9d 100644
--- a/tests/profile_abi_decoding.py
+++ b/tests/profile_abi_decoding.py
@@ -1,7 +1,7 @@
 import time
 from pathlib import Path
 
-import pprofile  # type: ignore[import]
+import pprofile  # type: ignore[import-untyped]
 
 from dipdup.indexes.evm_subsquid_events.matcher import decode_event_data
 from dipdup.package import EventAbiExtra
diff --git a/tests/test_demos.py b/tests/test_demos.py
index 57ea16d67..93d810cb9 100644
--- a/tests/test_demos.py
+++ b/tests/test_demos.py
@@ -4,6 +4,7 @@
 from collections.abc import AsyncIterator
 from collections.abc import Awaitable
 from collections.abc import Callable
+from contextlib import AbstractAsyncContextManager
 from contextlib import AsyncExitStack
 from contextlib import asynccontextmanager
 from decimal import Decimal
@@ -13,6 +14,7 @@
 
 import pytest
 
+from dipdup.database import get_connection
 from dipdup.database import tortoise_wrapper
 from dipdup.exceptions import FrameworkException
 from dipdup.models.tezos_tzkt import TzktOperationType
@@ -21,50 +23,58 @@
 
 
 @asynccontextmanager
-async def run_dipdup_demo(config: str, package: str, cmd: str = 'run') -> AsyncIterator[Path]:
-    config_path = CONFIGS_PATH / config
-    dipdup_pkg_path = SRC_PATH / 'dipdup'
-    demo_pkg_path = SRC_PATH / package
-    sqlite_config_path = Path(__file__).parent / 'configs' / 'sqlite.yaml'
-
-    with tempfile.TemporaryDirectory() as tmp_root_path:
+async def tmp_project(config_path: Path, package: str, exists: bool) -> AsyncIterator[tuple[Path, dict[str, str]]]:
+    with tempfile.TemporaryDirectory() as tmp_package_path:
         # NOTE: Symlink configs, packages and executables
-        tmp_config_path = Path(tmp_root_path) / 'dipdup.yaml'
+        tmp_config_path = Path(tmp_package_path) / 'dipdup.yaml'
         os.symlink(config_path, tmp_config_path)
 
-        tmp_bin_path = Path(tmp_root_path) / 'bin'
+        tmp_bin_path = Path(tmp_package_path) / 'bin'
         tmp_bin_path.mkdir()
         for executable in ('dipdup', 'datamodel-codegen'):
             if (executable_path := which(executable)) is None:
                 raise FrameworkException(f'Executable `{executable}` not found')
             os.symlink(executable_path, tmp_bin_path / executable)
 
-        tmp_dipdup_pkg_path = Path(tmp_root_path) / 'dipdup'
-        os.symlink(dipdup_pkg_path, tmp_dipdup_pkg_path)
+        os.symlink(
+            SRC_PATH / 'dipdup',
+            Path(tmp_package_path) / 'dipdup',
+        )
 
         # NOTE: Ensure that `run` uses existing package and `init` creates a new one
-        if cmd == 'run':
-            tmp_demo_pkg_path = Path(tmp_root_path) / package
-            os.symlink(demo_pkg_path, tmp_demo_pkg_path)
+        if exists:
+            os.symlink(
+                SRC_PATH / package,
+                Path(tmp_package_path) / package,
+            )
 
         # NOTE: Prepare environment
         env = {
             **os.environ,
             'PATH': str(tmp_bin_path),
-            'PYTHONPATH': str(tmp_root_path),
+            'PYTHONPATH': str(tmp_package_path),
             'DIPDUP_TEST': '1',
         }
 
-        subprocess.run(
-            f'dipdup -c {tmp_config_path} -c {sqlite_config_path} {cmd}',
-            cwd=tmp_root_path,
-            check=True,
-            shell=True,
-            env=env,
-            capture_output=True,
-        )
+        yield Path(tmp_package_path), env
 
-        yield Path(tmp_root_path)
+
+async def run_in_tmp(
+    tmp_path: Path,
+    env: dict[str, str],
+    *cmd: str,
+) -> None:
+    sqlite_config_path = Path(__file__).parent / 'configs' / 'sqlite.yaml'
+    tmp_config_path = Path(tmp_path) / 'dipdup.yaml'
+
+    subprocess.run(
+        f'dipdup -c {tmp_config_path} -c {sqlite_config_path} {" ".join(cmd)}',
+        cwd=tmp_path,
+        check=True,
+        shell=True,
+        env=env,
+        capture_output=True,
+    )
 
 
 async def assert_run_token() -> None:
@@ -246,21 +256,71 @@ async def assert_run_dao() -> None:
 
 
 @pytest.mark.parametrize(test_args, test_params)
-async def test_demos(
+async def test_run_init(
     config: str,
     package: str,
     cmd: str,
     assert_fn: Callable[[], Awaitable[None]],
 ) -> None:
+    config_path = CONFIGS_PATH / config
     async with AsyncExitStack() as stack:
-        tmp_root_path = await stack.enter_async_context(
-            run_dipdup_demo(config, package, cmd),
+        tmp_package_path, env = await stack.enter_async_context(
+            tmp_project(
+                config_path,
+                package,
+                exists=cmd != 'init',
+            ),
         )
+        await run_in_tmp(tmp_package_path, env, cmd)
         await stack.enter_async_context(
             tortoise_wrapper(
-                f'sqlite://{tmp_root_path}/db.sqlite3',
+                f'sqlite://{tmp_package_path}/db.sqlite3',
                 f'{package}.models',
             )
         )
 
         await assert_fn()
+
+
+async def _count_tables() -> int:
+    conn = get_connection()
+    _, res = await conn.execute_query('SELECT count(name) FROM sqlite_master WHERE type = "table";')
+    return int(res[0][0])
+
+
+async def test_schema() -> None:
+    package = 'demo_token'
+    config_path = CONFIGS_PATH / f'{package}.yml'
+
+    async with AsyncExitStack() as stack:
+        tmp_package_path, env = await stack.enter_async_context(
+            tmp_project(
+                config_path,
+                package,
+                exists=True,
+            ),
+        )
+
+        def tortoise() -> AbstractAsyncContextManager[None]:
+            return tortoise_wrapper(
+                f'sqlite://{tmp_package_path}/db.sqlite3',
+                f'{package}.models',
+            )
+
+        async with tortoise():
+            conn = get_connection()
+            assert (await _count_tables()) == 0
+
+        await run_in_tmp(tmp_package_path, env, 'schema', 'init')
+
+        async with tortoise():
+            conn = get_connection()
+            assert (await _count_tables()) == 10
+            await conn.execute_script('CREATE TABLE test (id INTEGER PRIMARY KEY);')
+            assert (await _count_tables()) == 11
+
+        await run_in_tmp(tmp_package_path, env, 'schema', 'wipe', '--force')
+
+        async with tortoise():
+            conn = get_connection()
+            assert (await _count_tables()) == 0
diff --git a/tests/test_hasura.py b/tests/test_hasura.py
index cc5e5b792..ea233c607 100644
--- a/tests/test_hasura.py
+++ b/tests/test_hasura.py
@@ -9,7 +9,7 @@
 import pytest
 from aiohttp import web
 from aiohttp.pytest_plugin import AiohttpClient
-from docker.client import DockerClient  # type: ignore[import]
+from docker.client import DockerClient  # type: ignore[import-untyped]
 from tortoise import Tortoise
 
 from dipdup.config import DipDupConfig
diff --git a/tests/test_introspection.py b/tests/test_introspection.py
index 957cdd587..ecc4f759e 100644
--- a/tests/test_introspection.py
+++ b/tests/test_introspection.py
@@ -15,8 +15,8 @@ def test_list_simple_args() -> None:
     assert get_list_elt_type(list[str]) == str
     assert get_list_elt_type(list[int]) == int
     assert get_list_elt_type(list[bool]) == bool
-    assert get_list_elt_type(list[str | None]) == str | None  # type: ignore[comparison-overlap]
-    assert get_list_elt_type(list[str | int]) == str | int  # type: ignore[comparison-overlap]
+    assert get_list_elt_type(list[str | None]) == str | None
+    assert get_list_elt_type(list[str | int]) == str | int
     assert get_list_elt_type(list[tuple[str]]) == tuple[str]
     assert get_list_elt_type(list[list[str]]) == list[str]
     assert get_list_elt_type(list[dict[str, str]]) == dict[str, str]
@@ -27,8 +27,8 @@ class Class:
         ...
 
     assert get_list_elt_type(list[Class]) == Class
-    assert get_list_elt_type(list[Class | None]) == Class | None  # type: ignore[comparison-overlap]
-    assert get_list_elt_type(list[Class | int]) == Class | int  # type: ignore[comparison-overlap]
+    assert get_list_elt_type(list[Class | None]) == Class | None
+    assert get_list_elt_type(list[Class | int]) == Class | int
     assert get_list_elt_type(list[tuple[Class]]) == tuple[Class]
     assert get_list_elt_type(list[list[Class]]) == list[Class]
     assert get_list_elt_type(list[dict[str, Class]]) == dict[str, Class]
@@ -44,7 +44,7 @@ class SomethingElse(BaseModel):
     class OptionalList(BaseModel):
         __root__: list[str] | None
 
-    assert get_list_elt_type(ListOfMapsStorage) == int | dict[str, str]  # type: ignore[comparison-overlap]
+    assert get_list_elt_type(ListOfMapsStorage) == int | dict[str, str]
 
     with pytest.raises(IntrospectionError):
         get_list_elt_type(OptionalList)
@@ -57,8 +57,8 @@ def test_dict_simple_args() -> None:
     assert get_dict_value_type(dict[str, str]) == str
     assert get_dict_value_type(dict[str, int]) == int
     assert get_dict_value_type(dict[str, bool]) == bool
-    assert get_dict_value_type(dict[str, str | None]) == str | None  # type: ignore[comparison-overlap]
-    assert get_dict_value_type(dict[str, str | int]) == str | int  # type: ignore[comparison-overlap]
+    assert get_dict_value_type(dict[str, str | None]) == str | None
+    assert get_dict_value_type(dict[str, str | int]) == str | int
     assert get_dict_value_type(dict[str, tuple[str]]) == tuple[str]
     assert get_dict_value_type(dict[str, list[str]]) == list[str]
     assert get_dict_value_type(dict[str, dict[str, str]]) == dict[str, str]
@@ -69,8 +69,8 @@ class Class:
         ...
 
     assert get_dict_value_type(dict[str, Class]) == Class
-    assert get_dict_value_type(dict[str, Class | None]) == Class | None  # type: ignore[comparison-overlap]
-    assert get_dict_value_type(dict[str, Class | int]) == Class | int  # type: ignore[comparison-overlap]
+    assert get_dict_value_type(dict[str, Class | None]) == Class | None
+    assert get_dict_value_type(dict[str, Class | int]) == Class | int
     assert get_dict_value_type(dict[str, tuple[Class]]) == tuple[Class]
     assert get_dict_value_type(dict[str, list[Class]]) == list[Class]
     assert get_dict_value_type(dict[str, dict[str, Class]]) == dict[str, Class]
@@ -86,7 +86,7 @@ class SomethingElse(BaseModel):
     class OptionalDict(BaseModel):
         __root__: dict[str, str] | None
 
-    assert get_dict_value_type(DictOfMapsStorage) == int | dict[str, str]  # type: ignore[comparison-overlap]
+    assert get_dict_value_type(DictOfMapsStorage) == int | dict[str, str]
 
     with pytest.raises(IntrospectionError):
         get_dict_value_type(OptionalDict)
@@ -105,8 +105,8 @@ class Storage(BaseModel):
     assert get_dict_value_type(Storage, 'plain_str') == str
     assert get_dict_value_type(Storage, 'list_str') == list[str]
     assert get_dict_value_type(Storage, 'dict_of_lists') == dict[str, list[str]]
-    assert get_dict_value_type(Storage, 'optional_str') == str | None  # type: ignore[comparison-overlap]
-    assert get_dict_value_type(Storage, 'union_arg') == str | int  # type: ignore[comparison-overlap]
+    assert get_dict_value_type(Storage, 'optional_str') == str | None
+    assert get_dict_value_type(Storage, 'union_arg') == str | int
 
 
 def test_is_array() -> None:
@@ -144,6 +144,6 @@ class OptionalStr(BaseModel):
     class ListOfMapsStorage(BaseModel):
         __root__: list[int | dict[str, str]]
 
-    assert extract_root_outer_type(OptionalStr) == str | None  # type: ignore[comparison-overlap]
+    assert extract_root_outer_type(OptionalStr) == str | None
     # FIXME: left operand type: "Type[BaseModel]", right operand type: "Type[List[Any]]"
     assert extract_root_outer_type(ListOfMapsStorage) == list[int | dict[str, str]]  # type: ignore[comparison-overlap]
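A note on the `get_transactions` fix in `src/dipdup/datasources/tezos_tzkt.py` above: because TzKT currently can't combine `sort` with cursor pagination, the patch disables the built-in offset and instead feeds the last seen operation id back as an `id.gt` filter on the next request. The sketch below illustrates that pattern in isolation. It is a hypothetical, simplified loop rather than DipDup's actual datasource code; `fetch_json`, `iter_transactions`, and `batch_size` are invented names for illustration, while the query parameters mirror those in the hunk.

```python
from collections.abc import AsyncIterator
from collections.abc import Awaitable
from collections.abc import Callable
from typing import Any

# `fetch_json` is an assumed stand-in for an HTTP helper: GET a TzKT path
# with query `params` and return the parsed JSON list of operations.
FetchJson = Callable[[str, dict[str, Any]], Awaitable[list[dict[str, Any]]]]


async def iter_transactions(
    fetch_json: FetchJson,
    first_level: int,
    last_level: int,
    batch_size: int = 1000,
) -> AsyncIterator[dict[str, Any]]:
    last_id: int | None = None
    while True:
        params: dict[str, Any] = {
            'level.ge': first_level,
            'level.le': last_level,
            'sort': 'level',  # plain sort; cursor mode can't be combined with it
            'status': 'applied',
            'limit': batch_size,
        }
        if last_id is not None:
            # Resume strictly after the last operation seen, mirroring
            # `params['id.gt'] = offset` in the patched method.
            params['id.gt'] = last_id
        batch = await fetch_json('/v1/operations/transactions', params)
        if not batch:
            break
        for op in batch:
            yield op
        last_id = batch[-1]['id']
```

Keying each batch on the previous batch's last operation id, instead of a numeric page offset, lets iteration resume without skipping or duplicating operations, which appears to be the regression the 6.5.13/7.0.2 changelog entries above refer to.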