From f986daea360d3ffdb49a60c28d150254bb76b251 Mon Sep 17 00:00:00 2001 From: Tim O'Guin Date: Tue, 23 Nov 2021 12:44:56 -0600 Subject: [PATCH] Expanded testing, plus new models for AWS Config, SQS, SNS (#16) Adds a number of improvements. Tests will need to be revisited, but I've added a decent base. Bumps to v0.1.1 ### Added - Adds support for generating a Graphviz diagram of an Organization with the new `OrganizationDataBuilder.to_dot()` function - Adds `DOT` as a supported output format for the `organization dump-all` command - Adds models for AWS Config notifications - Adds models for SQS and SNS messages - Adds methods to ModelBase to allow (de)serializing JSON or YAML strings - Adds ModelBase.from_dict() to initialize a model from a dict using dacite - Adds CodeQL analysis workflow for GitHub Actions ### Changed - breaking: Renames `organization dump-json` CLI command to `organization dump-all` - Moves buiders into the models namespace --- .editorconfig | 0 .github/CODE_OF_CONDUCT.md | 4 +- .github/workflows/ci.yml | 38 + .gitignore | 6 +- .pre-commit-config.yaml | 20 +- CHANGELOG.md | 11 +- Makefile | 7 +- README.md | 27 +- aws_data_tools/__init__.py | 3 + aws_data_tools/builders/__init__.py | 0 aws_data_tools/builders/organizations.py | 568 ------- aws_data_tools/cli/__init__.py | 187 ++- aws_data_tools/client/__init__.py | 4 + aws_data_tools/{ => client}/client.py | 31 +- aws_data_tools/client/tests/test_client.py | 25 + aws_data_tools/conftest.py | 30 + aws_data_tools/fixtures/account_paths.txt | 33 + aws_data_tools/fixtures/dynamodb_item.json | 28 + .../fixtures/dynamodb_item_serialized.json | 98 ++ aws_data_tools/fixtures/ou_paths.txt | 36 + aws_data_tools/models/__init__.py | 9 + aws_data_tools/models/base.py | 69 +- aws_data_tools/models/config.py | 297 ++++ aws_data_tools/models/organizations.py | 837 +++++++++- aws_data_tools/models/sns.py | 62 + aws_data_tools/models/sqs.py | 95 ++ .../config/item-change-notification.json | 12 + .../oversized-item-change-notification.json | 12 + .../tests/fixtures/config/periodic-rule.json | 12 + .../tests/fixtures/sns/notification.json | 31 + .../tests/fixtures/sqs/receive-message.json | 20 + aws_data_tools/models/tests/test_base.py | 86 + .../models/tests/test_organizations.py | 539 +++++++ aws_data_tools/utils.py | 53 - aws_data_tools/utils/__init__.py | 6 + aws_data_tools/utils/dynamodb.py | 50 + aws_data_tools/utils/tags.py | 20 + aws_data_tools/utils/tests/test_dynamodb.py | 68 + aws_data_tools/utils/tests/test_tags.py | 56 + aws_data_tools/utils/validators.py | 13 + docker-compose.yaml | 6 + docker-compose.yml | 7 - poetry.lock | 1420 ++++++++++------- pyproject.toml | 62 +- test.py | 85 + tests/test.py.bak | 30 - tests/test_builders_organization.py | 0 tests/test_client.py | 0 tests/test_models_base.py | 0 tests/test_models_organizations.py | 0 tests/test_utils.py | 0 51 files changed, 3729 insertions(+), 1384 deletions(-) delete mode 100644 .editorconfig delete mode 100644 aws_data_tools/builders/__init__.py delete mode 100644 aws_data_tools/builders/organizations.py create mode 100644 aws_data_tools/client/__init__.py rename aws_data_tools/{ => client}/client.py (66%) create mode 100644 aws_data_tools/client/tests/test_client.py create mode 100644 aws_data_tools/conftest.py create mode 100644 aws_data_tools/fixtures/account_paths.txt create mode 100644 aws_data_tools/fixtures/dynamodb_item.json create mode 100644 aws_data_tools/fixtures/dynamodb_item_serialized.json create mode 100644 aws_data_tools/fixtures/ou_paths.txt create 
mode 100644 aws_data_tools/models/config.py create mode 100644 aws_data_tools/models/sns.py create mode 100644 aws_data_tools/models/sqs.py create mode 100644 aws_data_tools/models/tests/fixtures/config/item-change-notification.json create mode 100644 aws_data_tools/models/tests/fixtures/config/oversized-item-change-notification.json create mode 100644 aws_data_tools/models/tests/fixtures/config/periodic-rule.json create mode 100644 aws_data_tools/models/tests/fixtures/sns/notification.json create mode 100644 aws_data_tools/models/tests/fixtures/sqs/receive-message.json create mode 100644 aws_data_tools/models/tests/test_base.py create mode 100644 aws_data_tools/models/tests/test_organizations.py delete mode 100644 aws_data_tools/utils.py create mode 100644 aws_data_tools/utils/__init__.py create mode 100644 aws_data_tools/utils/dynamodb.py create mode 100644 aws_data_tools/utils/tags.py create mode 100644 aws_data_tools/utils/tests/test_dynamodb.py create mode 100644 aws_data_tools/utils/tests/test_tags.py create mode 100644 aws_data_tools/utils/validators.py create mode 100644 docker-compose.yaml delete mode 100644 docker-compose.yml create mode 100644 test.py delete mode 100644 tests/test.py.bak delete mode 100644 tests/test_builders_organization.py delete mode 100644 tests/test_client.py delete mode 100644 tests/test_models_base.py delete mode 100644 tests/test_models_organizations.py delete mode 100644 tests/test_utils.py diff --git a/.editorconfig b/.editorconfig deleted file mode 100644 index e69de29..0000000 diff --git a/.github/CODE_OF_CONDUCT.md b/.github/CODE_OF_CONDUCT.md index c2bb3d8..9e33ccc 100644 --- a/.github/CODE_OF_CONDUCT.md +++ b/.github/CODE_OF_CONDUCT.md @@ -117,11 +117,11 @@ This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.0, available at [https://www.contributor-covenant.org/version/2/0/code_of_conduct.html][v2.0]. -Community Impact Guidelines were inspired by +Community Impact Guidelines were inspired by [Mozilla's code of conduct enforcement ladder][Mozilla CoC]. For answers to common questions about this code of conduct, see the FAQ at -[https://www.contributor-covenant.org/faq][FAQ]. Translations are available +[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at [https://www.contributor-covenant.org/translations][translations]. 
[homepage]: https://www.contributor-covenant.org diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ce5261c..d02a1de 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -127,3 +127,41 @@ jobs: - name: Run tests run: make test + + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + steps: + - name: Checkout repository + uses: actions/checkout@v2 + with: + fetch-depth: 1 + + - name: Set up Python + uses: actions/setup-python@v1 + with: + python-version: 3.9 + + - name: Install Poetry + uses: snok/install-poetry@v1.1.6 + with: + virtualenvs-create: false + virtualenvs-in-project: true + + - name: Restore cache venv + uses: syphar/restore-virtualenv@v1 + id: cache-venv + + - name: Initialize CodeQL + uses: github/codeql-action/init@v1.0.3 + with: + languages: python + setup_python_dependencies: false + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v1.0.3 diff --git a/.gitignore b/.gitignore index 5a1992f..854c419 100644 --- a/.gitignore +++ b/.gitignore @@ -308,7 +308,7 @@ dist .pnp.* # ------------------------------------------------------------------------------------- -# Python +# Python # ------------------------------------------------------------------------------------- # Byte-compiled / optimized / DLL files @@ -1101,3 +1101,7 @@ $RECYCLE.BIN/ # Windows shortcuts *.lnk + +# Misc + +*.bak diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b07ff74..0aad3b1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ # pre-commit run --all-files repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.4.0 + rev: v4.0.1 hooks: - id: check-ast - id: check-json @@ -9,24 +9,24 @@ repos: - id: check-toml - id: check-yaml - id: debug-statements - - id: detect-aws-credentials - args: ["--allow-missing-credentials"] - id: detect-private-key - id: end-of-file-fixer - id: mixed-line-ending - id: no-commit-to-branch - id: trailing-whitespace - repo: https://github.com/psf/black - rev: 21.6b0 + rev: 21.11b1 hooks: - id: black - repo: https://github.com/asottile/blacken-docs - rev: v1.10.0 + rev: v1.12.0 hooks: - id: blacken-docs additional_dependencies: [black==21.6b0] - - repo: https://github.com/flakehell/flakehell - rev: v.0.8.0 - hooks: - - name: Run flakehell static analysis tool - id: flakehell + # Disable flakehell for now due to import bug with newer flake8 + # https://github.com/flakehell/flakehell/issues/22 + # - repo: https://github.com/flakehell/flakehell + # rev: v.0.9.0 + # hooks: + # - name: Run flakehell static analysis tool + # id: flakehell diff --git a/CHANGELOG.md b/CHANGELOG.md index b891c89..8add354 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,15 +14,23 @@ the unreleased section to the section for the new release. No unreleased changes. 
+## [0.1.1] - 2021-11-23 + ### Added - Adds support for generating a Graphviz diagram of an Organization with the new `OrganizationDataBuilder.to_dot()` function - Adds `DOT` as a supported output format for the `organization dump-all` command +- Adds models for AWS Config notifications +- Adds models for SQS and SNS messages +- Adds methods to ModelBase to allow (de)serializing JSON or YAML strings +- Adds ModelBase.from_dict() to initialize a model from a dict using dacite +- Adds CodeQL analysis workflow for GitHub Actions ### Changed - breaking: Renames `organization dump-json` CLI command to `organization dump-all` +- Moves buiders into the models namespace ## [0.1.0-beta2] - 2021-06-16 @@ -78,7 +86,8 @@ Initial alpha release These Markdown anchors provide a link to the diff for each release. They should be updated any time a new release is cut. --> -[Unreleased]: https://github.com/timoguin/aws-org-tools-py/compare/v0.1.0-beta-2...HEAD +[Unreleased]: https://github.com/timoguin/aws-org-tools-py/compare/v0.1.1...HEAD +[0.1.1]: https://github.com/timoguin/aws-org-tools-py/compare/v0.1.0-beta2...v0.1.1 [0.1.0-beta2]: https://github.com/timoguin/aws-org-tools-py/compare/v0.1.0-beta1...v0.1.0-beta2 [0.1.0-beta1]: https://github.com/timoguin/aws-org-tools-py/compare/v0.1.0-alpha4...v0.1.0-beta1 [0.1.0-alpha4]: https://github.com/timoguin/aws-org-tools-py/releases/tag/v0.1.0-alpha4 diff --git a/Makefile b/Makefile index f8df5e2..51be01d 100644 --- a/Makefile +++ b/Makefile @@ -31,8 +31,9 @@ shellcmd: ${VENV_DIR} lint: ${VENV_DIR} @echo "Running the black code formatter" @poetry run black ${ARGS} . - @echo "Running flakehell plugins" - @poetry run flakehell lint aws_data_tools + # flakehell currently disabled, see .pre-commit-config.yaml notesk + # @echo "Running flakehell plugins" + # @poetry run flakehell lint aws_data_tools .PHONY: lint-docs lint-docs: ${VENV_DIR} @@ -72,7 +73,7 @@ build: ${VENV_DIR} .PHONY: test ## Run the test suite test: ${VENV_DIR} @echo "Running test suite" - @poetry run pytest --suppress-no-test-exit-code + @poetry run pytest --cov ${OPTS} ${ARGS} # Ensures the Python venv exists and has dependencies installed ${VENV_DIR}: diff --git a/README.md b/README.md index 89f5277..fedb542 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,8 @@ # AWS Data Tools -[![Actions Status][gh-actions-badge]][gh-actions-link] +[![Actions CI Status][gh-actions-ci-badge]][gh-actions-ci-link] +[![Actions CodeQL Status][gh-actions-codeql-badge]][gh-actions-codeql-link] [![PyPI][pypi-badge]][pypi-link] [![License][license-badge]][license-link] @@ -32,7 +33,7 @@ To dump a data representation of an AWS Organization, you can do the following u the builder: ```python -from aws_data_tools.builders.organizations import OrganizationDataBuilder +from aws_data_tools.models.organizations import OrganizationDataBuilder odb = OrganizationDataBuilder(init_all=True) organization = odb.as_json() @@ -41,7 +42,7 @@ organization = odb.as_json() Here is how to do the same thing with the CLI: ``` -$ awsdata organization dump-json +$ awsdata organization dump-all ``` ## Usage @@ -54,9 +55,9 @@ abstract some of these operations. ### Builders While it is possible to directly utilize and interact with the data models, probably -the largest benefit is the [builders](aws_data_tools/builders) package. It abstracts -any API operations and data transformations required to build data models. The models -can then be serialized to dicts, as well as JSON or YAML strings. +the largest benefit are the builders. 
They abstract any API operations and data +transformations required to build data models. The models can then be serialized to +dicts, as well as DOT, JSON, or YAML strings. A full model of an AWS Organization can be constructed using the `OrganizationDataBuilder` class. It handles recursing the organizational tree and @@ -67,9 +68,9 @@ The simplest example pulls all supported organizational data and creates the rel data models: ```python -from aws_data_tools.builders.organizations import OrganizationDataBuilder as odb +from aws_data_tools.models.organizations import OrganizationDataBuilder -org = odb(init_all=True) +odb = OrganizationDataBuilder(init_all=True) ``` Note that this makes many API calls to get this data. For example, every OU, policy, @@ -84,9 +85,9 @@ requires 316 API calls! That's why this library was created. For more control over the process, you can init each set of components as desired: ```python -from aws_data_tools.builders.organizations import OrganizationDataBuilder as odb +from aws_data_tools.models.organizations import OrganizationDataBuilder -org = odb() +org = OrganizationDataBuilder() org.init_connection() org.init_organization() org.init_root() @@ -274,8 +275,10 @@ View the [Contributing Guide](.github/CONTRIBUTING.md) to learn about giving bac -[gh-actions-badge]: https://github.com/timoguin/aws-data-tools-py/actions/workflows/ci.yml/badge.svg -[gh-actions-link]: https://github.com/timoguin/aws-data-tools-py/actions +[gh-actions-ci-badge]: https://github.com/timoguin/aws-data-tools-py/actions/workflows/ci.yml/badge.svg +[gh-actions-ci-link]: https://github.com/timoguin/aws-data-tools-py/actions/workflows/ci.yml +[gh-actions-codeql-badge]: https://github.com/timoguin/aws-data-tools-py/actions/workflows/codeql-analysis.yml/badge.svg +[gh-actions-codeql-link]: https://github.com/timoguin/aws-data-tools-py/actions/workflows/codeql-analysis.yml [license-badge]: https://img.shields.io/github/license/timoguin/aws-data-tools-py.svg [license-link]: https://github.com/timoguin/aws-data-tools-py/blob/main/LICENSE [pypi-badge]: https://badge.fury.io/py/aws-data-tools.svg diff --git a/aws_data_tools/__init__.py b/aws_data_tools/__init__.py index 711ae09..3e19298 100644 --- a/aws_data_tools/__init__.py +++ b/aws_data_tools/__init__.py @@ -1,6 +1,9 @@ """ A library for working with data from AWS APIs """ +# flake8: noqa: F401 + +from . 
import client, models, utils __VERSION__ = "0.1.0-beta2" diff --git a/aws_data_tools/builders/__init__.py b/aws_data_tools/builders/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/aws_data_tools/builders/organizations.py b/aws_data_tools/builders/organizations.py deleted file mode 100644 index bd95e66..0000000 --- a/aws_data_tools/builders/organizations.py +++ /dev/null @@ -1,568 +0,0 @@ -""" -Builder utilies for working with data from AWS Organizations APIs -""" - -from dataclasses import dataclass, field, InitVar -from typing import Any, Dict, List, Union - -from graphviz import Digraph, unflatten - -from ..client import APIClient -from ..models.base import ModelBase -from ..utils import query_tags - -from ..models.organizations import ( - Account, - EffectivePolicy, - Organization, - OrganizationalUnit, - ParChild, - Policy, - PolicySummary, - PolicySummaryForTarget, - PolicyTargetSummary, - PolicyTypeSummary, - Root, -) - - -_SERVICE_NAME = "organizations" -_OU_MAXDEPTH = 5 - - -@dataclass -class OrganizationDataBuilder(ModelBase): - """ - Performs read-only operations against the Organizations APIs to construct data - models of organizations objects. It can populate data for most supported objects: - - - Organization (the org itself) - - Roots - - Organizational Units - - Policies - - Accounts - - Effective Policies - - Tags - - It currently doesn't support getting data about delegated administrators or - services, handshakes, account creation statuses, or AWS service integrations. - - Provides serialization to dicts and JSON. - """ - - client: APIClient = field(default=None, repr=False) - dm: Organization = field(default_factory=Organization) - - # Used by __post_init__() to determine what data to initialize (default is none) - init_all: InitVar[bool] = field(default=False) - init_connection: InitVar[bool] = field(default=True) - init_organization: InitVar[bool] = field(default=False) - init_root: InitVar[bool] = field(default=False) - init_policies: InitVar[bool] = field(default=False) - init_policy_tags: InitVar[bool] = field(default=False) - init_ous: InitVar[bool] = field(default=False) - init_ou_tags: InitVar[bool] = field(default=False) - init_accounts: InitVar[bool] = field(default=False) - init_account_tags: InitVar[bool] = field(default=False) - init_policy_targets: InitVar[bool] = field(default=False) - init_effective_policies: InitVar[bool] = field(default=False) - - include_account_parents: bool = field(default=False) - - def Connect(self): - """Initialize an authenticated session""" - if self.client is None: - self.client = APIClient(_SERVICE_NAME) - - def api(self, func: str, **kwargs) -> Union[List[Dict[str, Any]], Dict[str, Any]]: - """Make arbitrary API calls with the session client""" - if self.client is None: - self.Connect() - return self.client.api(func, **kwargs) - - def to_dot(self) -> str: - """Return the organization as a GraphViz DOT diagram""" - graph = Digraph("Organization", filename="organization.dot") - nodes = [] - nodes.append(self.dm.root) - nodes.extend(self.dm.organizational_units) - nodes.extend(self.dm.accounts) - for node in nodes: - if getattr(node, "parent", None) is None: - continue - shape = None - if isinstance(node, Root): - shape = "circle" - elif isinstance(node, OrganizationalUnit): - shape = "box" - elif isinstance(node, Account): - shape = "ellipse" - else: - continue - graph.node(node.id, label=node.name, shape=shape) - graph.edge(node.parent.id, node.id) - return unflatten( - graph.source, - stagger=10, - 
fanout=10, - chain=10, - ) - - def __e_organization(self) -> Dict[str, str]: - """Extract org description data from the DescribeOrganization API""" - return self.api("describe_organization").get("organization") - - def __t_organization(self) -> Dict[str, Union[str, List[PolicySummary]]]: - """Deserialize org description data and perform any transformations""" - data = {} - for k, v in self.__e_organization().items(): - # Convert avail policy types to PolicyTypeSummary objects - if k == "available_policy_types": - data[k] = [PolicyTypeSummary(**pol) for pol in v] - continue - data[k] = v - return data - - def __l_organization(self) -> None: - """Init the Organization instance on the dm field""" - self.dm = Organization(**self.__t_organization()) - - def fetch_organization(self) -> None: - """Initialize the organization object with minimal data""" - self.__l_organization() - - def __e_roots(self) -> List[Dict[str, Any]]: - """Extract org roots from the ListRoots API""" - return self.api("list_roots") - - def __t_roots(self) -> Root: - """Deserialize and transform org roots data into a single Root object""" - return [Root(**root) for root in self.__e_roots()][0] - - def __l_roots(self) -> None: - """Init the Root instance of the dm.root field""" - self.dm.root = self.__t_roots() - - def fetch_root(self) -> None: - """Initialize the organization's root object""" - if self.dm.id is None: - self.fetch_organization() - self.__l_roots() - - def __e_policies(self) -> List[Dict[str, Any]]: - """Extract organization policy data from ListPolicies and DescribePolicy""" - ret = [] - for p in self.dm.available_policy_types: - policies = [] - p_summaries = self.api("list_policies", filter=p.type) - for p_summary in p_summaries: - p_detail = self.api("describe_policy", policy_id=p_summary["id"]).get( - "policy" - ) - policies.append(p_detail) - ret.extend(policies) - return ret - - def __t_policies(self) -> List[Policy]: - """Deserialize list of policy dicts into a list of Policy objects""" - ret = [] - for p in self.__e_policies(): - p_summary = PolicySummary(**p["policy_summary"]) - policy = Policy(policy_summary=p_summary, content=p["content"]) - ret.append(policy) - return ret - - def __l_policies(self) -> None: - """Load policy objects into dm.policies field""" - self.dm.policies = self.__t_policies() - if self.dm._policy_index_map is None: - self.dm._policy_index_map = {} - for i, policy in enumerate(self.dm.policies): - self.dm._policy_index_map[policy.policy_summary.id] = i - - def fetch_policies(self) -> None: - """Initialize the list of Policy objects in the organization""" - self.__l_policies() - - def __e_policy_targets_for_id(self, policy_id: str) -> List[PolicyTargetSummary]: - """Extract a list of policy targets from ListTargetsForPolicy""" - return self.api("list_targets_for_policy", policy_id=policy_id) - - def __e_policy_targets(self) -> Dict[str, List[Dict[str, Any]]]: - """Extract target summary data for all policies""" - ret = {} - if self.dm.policies is None: - self.fetch_policies() - for policy in self.dm.policies: - pid = policy.policy_summary.id - data = self.__e_policy_targets_for_id(policy_id=pid) - for target in data: - if ret.get(pid) is None: - ret[pid] = [] - ret[pid].append(target) - return ret - - def __lookup_obj_index(self, obj_type: str, obj_id: str) -> int: - """Lookup the list index of a type of node in the data model""" - map_field = None - if obj_type == "account": - map_field = self.dm._account_index_map - elif obj_type == "ou" or obj_type == "organizational_unit": 
- map_field = self.dm._ou_index_map - elif obj_type == "policy": - map_field = self.dm._policy_index_map - return map_field[obj_id] - - def __lookup_account_index(self, account_id: str) -> int: - """Lookup the index of an account object in the dm.accounts field""" - return self.__lookup_obj_index("account", account_id) - - def __lookup_ou_index(self, ou_id: str) -> int: - """Lookup the list index of an OU in the dm.organizational_units field""" - return self.__lookup_obj_index("ou", ou_id) - - def __lookup_policy_index(self, policy_id: str) -> int: - """Lookup the list index of an account in dm.accounts""" - return self.__lookup_obj_index("policy", policy_id) - - def __t_policy_targets( - self, - ) -> Dict[str, Dict[str, List[Union[PolicySummaryForTarget, PolicySummary]]]]: - """ - Deserialize policy targets into a dict of PolicySummaryForTarget and - PolicyTargetSummary objects - """ - data = {} - for pid, p_targets in self.__e_policy_targets().items(): - p_index = self.__lookup_policy_index(pid) - p_type = self.dm.policies[p_index].policy_summary.type - for p_target in p_targets: - p_summary_for_target = PolicySummaryForTarget(id=pid, type=p_type) - if data.get(pid) is None: - data[pid] = { - "policy_index": p_index, - "policy_summary_for_targets": p_summary_for_target, - "target_details": [], - } - data[pid]["target_details"].append(PolicyTargetSummary(**p_target)) - return data - - def __l_policy_targets(self) -> None: - """Load policy target objects and data into the data model""" - data = self.__t_policy_targets() - for pid, d in data.items(): - p_index = d["policy_index"] - # Update "targets" for Policy objects - self.dm.policies[p_index].targets = d["target_details"] - # Update "policies" for target objects - for target in d["target_details"]: - if target.type == "ROOT": - if self.dm.root.policies is None: - self.dm.root.policies = [] - self.dm.root.policies.append(d["policy_summary_for_targets"]) - elif target.type == "ORGANIZATIONAL_UNIT": - ou_index = self.__lookup_ou_index(target.target_id) - if self.dm.organizational_units[ou_index].policies is None: - self.dm.organizational_units[ou_index].policies = [] - self.dm.organizational_units[ou_index].policies.append( - d["policy_summary_for_targets"] - ) - elif target.type == "ACCOUNT": - acct_index = self.__lookup_account_index(target.target_id) - if self.dm.accounts[acct_index].policies is None: - self.dm.accounts[acct_index].policies = [] - self.dm.accounts[acct_index].policies.append( - d["policy_summary_for_targets"] - ) - - def fetch_policy_targets(self) -> None: - """Initialize the list of Policy objects in the organization""" - self.__l_policy_targets() - - def __e_ous_recurse( - self, - parents: List[ParChild] = None, - ous: List[OrganizationalUnit] = None, - depth: int = 0, - maxdepth: int = _OU_MAXDEPTH, - ) -> List[OrganizationalUnit]: - """Recurse the org tree and return a list of OU dicts""" - if parents is None: - if self.dm.root is None: - self.fetch_root() - parents = [self.dm.root.to_parchild()] - if self.dm._parent_child_tree is None: - self.dm._parent_child_tree = {} - if self.dm._child_parent_tree is None: - self.dm._child_parent_tree = {} - if self.dm.organizational_units is None: - self.dm.organizational_units = [] - if depth == maxdepth or len(parents) == 0: - return ous - if ous is None: - ous = [] - next_parents = [] - for parent in parents: - if self.dm._parent_child_tree.get(parent.id) is None: - self.dm._parent_child_tree[parent.id] = [] - ou_results = self.api( - "list_organizational_units_for_parent", 
parent_id=parent.id - ) - for ou_result in ou_results: - ou = OrganizationalUnit(parent=parent, **ou_result) - ou_to_parchild = ou.to_parchild() - self.dm._parent_child_tree[parent.id].append(ou_to_parchild) - self.dm._child_parent_tree[ou.id] = parent - ous.append(ou) - next_parents.append(ou_to_parchild) - acct_results = self.api("list_accounts_for_parent", parent_id=parent.id) - for acct_result in acct_results: - account = Account(parent=parent, **acct_result) - self.dm._parent_child_tree[parent.id].append(account.to_parchild()) - self.dm._child_parent_tree[account.id] = parent - return self.__e_ous_recurse(parents=next_parents, ous=ous, depth=depth + 1) - - def __e_ous(self) -> List[OrganizationalUnit]: - """Extract the OU tree recursively, including OUs and child accounts""" - return self.__e_ous_recurse() - - def __t_ous(self) -> List[OrganizationalUnit]: - """Transform OU objects by populating child relationships""" - data = self.__e_ous() - ous = [] - for ou in data: - ou.children = self.dm._parent_child_tree[ou.id] - ous.append(ou) - return ous - - def __l_ous(self) -> None: - """Load deserialized org tree into data models (root and OUs)""" - ous = self.__t_ous() - self.dm.root.children = self.dm._parent_child_tree[self.dm.root.id] - self.dm.organizational_units = ous - if self.dm._ou_index_map is None: - self.dm._ou_index_map = {} - for i, ou in enumerate(self.dm.organizational_units): - self.dm._ou_index_map[ou.id] = i - - def fetch_ous(self) -> None: - """Recurse the org tree and populate relationship data for nodes""" - self.__l_ous() - - def __e_accounts(self) -> List[Dict[str, Any]]: - """Extract the list of accounts in the org""" - return self.api("list_accounts") - - def __t_accounts(self) -> List[Account]: - """Transform account data into a list of Account objects""" - return [Account(**account) for account in self.__e_accounts()] - - def __l_accounts(self, include_parents: bool = False) -> None: - """Load account objects with parent relationship data""" - data = self.__t_accounts() - accounts = [] - for result in data: - account = result - if include_parents or self.include_account_parents: - if self.dm._child_parent_tree is None: - self.fetch_ous() - account.parent = self.dm._child_parent_tree[account.id] - accounts.append(account) - self.dm.accounts = accounts - if self.dm._account_index_map is None: - self.dm._account_index_map = {} - for i, account in enumerate(self.dm.accounts): - self.dm._account_index_map[account.id] = i - - def fetch_accounts(self, **kwargs) -> None: - """Initialize the list of Account objects in the organization""" - self.__l_accounts(**kwargs) - - def __e_effective_policies_for_target( - self, target_id: str - ) -> List[EffectivePolicy]: - """Extract a list of effective policies for a target node""" - effective_policies = [] - for p in self.dm.available_policy_types: - # SCPs aren't supported for effective policies - if p.type == "SERVICE_CONTROL_POLICY": - continue - data = self.api( - "describe_effective_policy", policy_type=p.type, target_id=target_id - ) - effective_policies.append(data) - return effective_policies - - def __e_effective_policies( - self, account_ids: List[str] = None - ) -> Dict[int, List[Dict[str, Any]]]: - """Extract the effective policies for accounts or a list of account IDs""" - ret = {} - if self.dm.accounts is None: - self.fetch_accounts() - if account_ids is None: - account_ids = [account.id for account in self.dm.accounts] - for account_id in account_ids: - ret[account_id] = 
self.__e_effective_policies_for_target(account_id) - return ret - - def __t_effective_policies(self, **kwargs) -> Dict[int, List[EffectivePolicy]]: - """Transform effective policy data into a list of EffectivePolicy""" - return [EffectivePolicy(**d) for d in self.__e_effective_policies()] - - def __l_effective_policies(self, **kwargs) -> None: - """Load effective policy objects into the account tree""" - for acct_id, effective_policies in self.__e_effective_policies().items(): - acct_index = self.__lookup_account_index(acct_id) - self.dm.accounts[acct_index].effective_policies = effective_policies - - def fetch_effective_policies(self, **kwargs) -> None: - """Initialize effective policy data for accounts in the org""" - self.__l_effective_policies(**kwargs) - - def __et_tags(self, resource_ids: List[str]) -> Dict[str, Dict[str, str]]: - """Extract and transform tags for a list of resource IDs""" - ret = {} - for resource_id in resource_ids: - ret[resource_id] = query_tags(self.client, resource_id) - return ret - - def __l_account_tags(self, account_ids: List[str] = None, **kwargs) -> None: - """Load tags for accounts in the organization""" - if self.dm.accounts is None: - self.fetch_accounts() - if account_ids is None: - account_ids = [account.id for account in self.dm.accounts] - data = self.__et_tags(resource_ids=account_ids) - for acct_id, tags in data.items(): - acct_index = self.__lookup_account_index(acct_id) - self.dm.accounts[acct_index].tags = tags - - def fetch_account_tags(self, **kwargs) -> None: - """Initialize tags for accounts in the organization""" - self.__l_account_tags(**kwargs) - - def __l_ou_tags(self, ou_ids: List[str] = None) -> None: - """Load tags for OUs in the organization""" - if self.dm.organizational_units is None: - self.fetch_organizational_units() - if ou_ids is None: - ou_ids = [ou.id for ou in self.dm.organizational_units] - data = self.__et_tags(resource_ids=ou_ids) - for ou_id, tags in data.items(): - ou_index = self.__lookup_ou_index(ou_id) - self.dm.organizational_units[ou_index].tags = tags - - def fetch_ou_tags(self, **kwargs) -> None: - """Initialize tags for OUs in the organization""" - self.__l_ou_tags(**kwargs) - - def __l_root_tags(self) -> None: - """Load tags for the organization root""" - if self.dm.root is None: - self.fetch_root() - data = self.__et_tags(resource_ids=[self.dm.root.id]) - self.dm.root.tags = data[self.dm.root.id] - - def fetch_root_tags(self) -> None: - """Initialize tags for the organization root""" - self.__l_root_tags() - - def __l_policy_tags(self, policy_ids: List[str] = None) -> None: - """Load tags for policies in the organization""" - if self.dm.policies is None: - self.fetch_policies() - if policy_ids is None: - policy_ids = [ - policy.policy_summary.id - for policy in self.dm.policies - if not policy.policy_summary.aws_managed - ] - data = self.__et_tags(resource_ids=policy_ids) - for policy_id, tags in data.items(): - policy_index = self.__lookup_policy_index(policy_id) - self.dm.policies[policy_index].tags = tags - - def fetch_policy_tags(self, **kwargs) -> None: - """Initialize tags for policies in the organization""" - self.__l_policy_tags(**kwargs) - - def fetch_all_tags(self) -> None: - """Initialize and populate tags for all taggable objects in the organization""" - self.fetch_root_tags() - self.fetch_policy_tags() - self.fetch_ou_tags() - self.fetch_account_tags() - - def to_dict(self, **kwargs) -> Dict[str, Any]: - """Return the data model for the organization as a dictionary""" - return 
self.dm.to_dict(**kwargs) - - def to_dynamodb(self, **kwargs) -> Dict[str, Any]: - """Return the data model for the organization as a DynamoDB Item""" - return self.dm.to_dynamodb(**kwargs) - - def to_json(self, **kwargs) -> str: - """Return the data model for the organization as a JSON string""" - return self.dm.to_json(**kwargs) - - def to_yaml(self, **kwargs) -> str: - """Return the data model for the organization as a YAML string""" - return self.dm.to_yaml(**kwargs) - - def fetch_all(self) -> None: - """Initialize all data for nodes and edges in the organization""" - self.Connect() - self.fetch_organization() - self.fetch_root() - self.fetch_root_tags() - self.fetch_policies() - self.fetch_policy_tags() - self.fetch_ous() - self.fetch_ou_tags() - self.fetch_accounts() - self.fetch_account_tags() - self.fetch_policy_targets() - self.fetch_effective_policies() - - def __post_init__( - self, - init_all: bool, - init_connection: bool, - init_organization: bool, - init_root: bool, - init_policies: bool, - init_policy_tags: bool, - init_ous: bool, - init_ou_tags: bool, - init_accounts: bool, - init_account_tags: bool, - init_policy_targets: bool, - init_effective_policies: bool, - ) -> None: - """Initialize all or selected data for the organization""" - if init_all: - self.fetch_all() - return - if init_connection: - self.Connect() - if init_organization: - self.fetch_organization() - if init_root: - self.fetch_root() - if init_policies: - self.fetch_policies() - if init_policy_tags: - self.fetch_policy_tags() - if init_ous: - self.fetch_ous() - if init_ou_tags: - self.fetch_ou_tags() - if init_accounts: - self.fetch_accounts() - if init_account_tags: - self.fetch_account_tags() - if init_policy_targets: - self.fetch_policy_targets() - if init_effective_policies: - self.fetch_effective_policies() diff --git a/aws_data_tools/cli/__init__.py b/aws_data_tools/cli/__init__.py index 8d5a539..e627f4f 100644 --- a/aws_data_tools/cli/__init__.py +++ b/aws_data_tools/cli/__init__.py @@ -2,52 +2,115 @@ CLI interface for working with data from AWS APIs """ -from itertools import zip_longest -from json import dumps as json_dumps -from json import load as json_load -from re import fullmatch -from traceback import format_exc -from typing import Any, Dict, List +import itertools +import json +import os +import re +import traceback +from typing import Any from botocore.exceptions import ClientError, NoCredentialsError -from click import ( - echo, - group, - open_file, - option, - pass_context, - secho, - version_option, - Choice, -) +import click +import click_completion +import click_completion.core from .. 
import get_version from ..client import APIClient -from ..builders.organizations import OrganizationDataBuilder -from ..models.organizations import Account +from ..models.organizations import Account, OrganizationDataBuilder -from ..utils import ( +from ..utils.dynamodb import ( deserialize_dynamodb_items, prepare_dynamodb_batch_put_request, ) +def custom_startswith(string, incomplete): + """A custom completion matching that supports case insensitive matching""" + if os.environ.get("_CLICK_COMPLETION_COMMAND_CASE_INSENSITIVE_COMPLETE"): + string = string.lower() + incomplete = incomplete.lower() + return string.startswith(incomplete) + + +click_completion.core.startswith = custom_startswith +click_completion.init() + +completion_cmd_help = """Shell completion for click-completion-command +Available shell types: + \b + %s + Default type: auto + """ % "\n ".join( + "{:<12} {}".format(k, click_completion.core.shells[k]) + for k in sorted(click_completion.core.shells.keys()) +) + CONTEXT_SETTINGS = {"help_option_names": ["-h", "--help"]} -@group(context_settings=CONTEXT_SETTINGS) -@version_option(version=get_version()) -@option("--debug", "-d", default=False, is_flag=True, help="Enable debug mode") -@pass_context +@click.group(context_settings=CONTEXT_SETTINGS) +@click.version_option(version=get_version()) +@click.option("--debug", "-d", default=False, is_flag=True, help="Enable debug mode") +@click.pass_context def cli(ctx, debug): """A command-line tool to interact with data from AWS APIs""" ctx.ensure_object(dict) ctx.obj["DEBUG"] = debug +@cli.group(hidden=True) +@click.option( + "-i", + "--case-insensitive/--no-case-insensitive", + default=True, + help="Case insensitive completion", +) +@click.pass_context +def completion(ctx, case_insensitive, help=completion_cmd_help): + """Commands for shell completion""" + extra_env = ( + {"_CLICK_COMPLETION_COMMAND_CASE_INSENSITIVE_COMPLETE": "ON"} + if case_insensitive + else {} + ) + ctx.obj["EXTRA_ENV"] = extra_env + + +@completion.command() +@click.pass_context +def bash(ctx): + """Print BASH completion script for sourcing""" + extra_env = ctx.obj["EXTRA_ENV"] + click.echo(click_completion.core.get_code("bash", extra_env=extra_env)) + + +@completion.command() +@click.pass_context +def zsh(ctx): + """Print ZSH completion script for sourcing""" + extra_env = ctx.obj["EXTRA_ENV"] + click.echo(click_completion.core.get_code("zsh", extra_env=extra_env)) + + +@completion.command() +@click.pass_context +def fish(ctx): + """Print fish completion script for sourcing""" + extra_env = ctx.obj["EXTRA_ENV"] + click.echo(click_completion.core.get_code("fish", extra_env=extra_env)) + + +@completion.command() +@click.pass_context +def powershell(ctx): + """Print Powershell completion script for sourcing""" + extra_env = ctx.obj["EXTRA_ENV"] + click.echo(click_completion.core.get_code("powershell", extra_env=extra_env)) + + @cli.group() -@pass_context +@click.pass_context def organization(ctx): """Interact with data from AWS Organizations APIs""" ctx.ensure_object(dict) @@ -56,42 +119,46 @@ def organization(ctx): def handle_error(ctx, err_msg, tb=None): """Takes an error message and an optional traceback, prints them, and quits""" if err_msg is not None: - secho(err_msg, fg="red") + click.secho(err_msg, fg="red") if tb is not None: - echo() - secho(tb, fg="red") + click.echo() + click.secho(tb, fg="red") ctx.exit(1) @organization.command(short_help="Dump org data as JSON") -@option( +@click.option( "--format", "-f", "format_", default="JSON", - 
type=Choice(["DOT", "JSON", "YAML"], case_sensitive=False), + type=click.Choice(["DOT", "JSON", "YAML"], case_sensitive=False), help="The output format for the data", ) -@option( +@click.option( "--no-accounts", default=False, is_flag=True, help="Exclude account data from the model", ) -@option( +@click.option( "--no-policies", default=False, is_flag=True, help="Exclude policy data from the model", ) -@option("--out-file", "-o", help="File path to write data instead of stdout") -@pass_context +@click.option("--out-file", "-o", help="File path to write data instead of stdout") +@click.option( + "--flatten", default=False, is_flag=True, help="Flatten nested model keys" +) +@click.pass_context def dump_all( - ctx: Dict[str, Any], + ctx: dict[str, Any], format_: str, no_accounts: bool, no_policies: bool, out_file: str, + flatten: bool, ) -> None: """Dump a data representation of the organization""" err_msg = None @@ -118,48 +185,50 @@ def dump_all( s_func = odb.to_dot if out_file is None: out_file = "-" - with open_file(out_file, mode="wb") as f: - f.write(bytes(s_func(), "utf-8")) + with click.open_file(out_file, mode="wb") as f: + f.write(bytes(s_func(flatten=flatten), "utf-8")) except ClientError as exc_info: err_msg = f"Service Error: {str(exc_info)}" except NoCredentialsError: err_msg = "Error: Unable to locate AWS credentials" except Exception as exc_info: err_msg = f"Unknown Error: {str(exc_info)}" - tb = format_exc() + tb = traceback.format_exc() handle_error(ctx, err_msg, tb) @organization.command(short_help="Query for account details") -@option("--accounts", "-a", required=True, help="A space-delimited list of account IDs") -@option( +@click.option( + "--accounts", "-a", required=True, help="A space-delimited list of account IDs" +) +@click.option( "--include-effective-policies", default=False, is_flag=True, help="Include effective policies for the accounts", ) -@option( +@click.option( "--include-parents", default=False, is_flag=True, help="Include parent data for the accounts", ) -@option( +@click.option( "--include-tags", default=False, is_flag=True, help="Include tags applied to the accounts", ) -@option( +@click.option( "--include-policies", default=False, is_flag=True, help="Include policies attached to the accounts", ) -@pass_context +@click.pass_context def lookup_accounts( - ctx: Dict[str, Any], - accounts: List[str], + ctx: dict[str, Any], + accounts: list[str], include_parents: bool, include_effective_policies: bool, include_policies: bool, @@ -174,7 +243,7 @@ def lookup_accounts( account_ids = [] invalid_ids = [] for account in accounts_unvalidated: - if fullmatch(r"^[\d]{12}$", account) is not None: + if re.fullmatch(r"^[\d]{12}$", account) is not None: account_ids.append(account) else: invalid_ids.append(account) @@ -210,24 +279,24 @@ def lookup_accounts( for acct in odb.dm.accounts if acct.id in account_ids ] - echo(json_dumps(data, default=str)) + click.echo(json.dumps(data, default=str)) @organization.command() -@option("--table", "-t", required=True, help="Name of the DynamoDB table") -@option( +@click.option("--table", "-t", required=True, help="Name of the DynamoDB table") +@click.option( "--in-file", "-i", required=True, help="File containing a list of Account objects" ) -@pass_context +@click.pass_context def write_accounts_to_dynamodb( - ctx: Dict[str, Any], + ctx: dict[str, Any], table: str, in_file: str, ) -> None: """Write a list of accounts to a DynamoDB table""" data = None - with open_file(in_file, mode="r") as f: - data = json_load(f) + with 
click.open_file(in_file, mode="r") as f: + data = json.load(f) odb = OrganizationDataBuilder() if not isinstance(data, list): handle_error(err_msg="Data is not a list") @@ -236,20 +305,20 @@ def write_accounts_to_dynamodb( client = APIClient("dynamodb") ret = {"responses": []} # Group into batches of 25 since that's the max for BatchWriteItem - for group_ in zip_longest(*[iter(accounts)] * 25): + for group_ in itertools.zip_longest(*[iter(accounts)] * 25): items = prepare_dynamodb_batch_put_request(table=table, items=group_) res = client.api("batch_write_item", request_items=items) # TODO: Add handling of any "UnprocessedItems" in the response. Add retry with # exponential backoff. ret["responses"].append(res) - echo(json_dumps(ret)) + click.echo(json.dumps(ret)) @organization.command() -@option("--table", "-t", required=True, help="Name of the DynamoDB table") -@pass_context +@click.option("--table", "-t", required=True, help="Name of the DynamoDB table") +@click.pass_context def read_accounts_from_dynamodb( - ctx: Dict[str, Any], + ctx: dict[str, Any], table: str, ) -> None: """Fetch a list of accounts from a DynamoDB table""" @@ -258,4 +327,4 @@ def read_accounts_from_dynamodb( accounts = [Account(**account) for account in deserialize_dynamodb_items(res)] odb = OrganizationDataBuilder() odb.dm.accounts = accounts - echo(odb.to_json(field_name="accounts")) + click.echo(odb.to_json(field_name="accounts")) diff --git a/aws_data_tools/client/__init__.py b/aws_data_tools/client/__init__.py new file mode 100644 index 0000000..67f79a1 --- /dev/null +++ b/aws_data_tools/client/__init__.py @@ -0,0 +1,4 @@ +"""Package containing classes for interacting with AWS APIs""" +# flake8: noqa: F401 + +from .client import APIClient diff --git a/aws_data_tools/client.py b/aws_data_tools/client/client.py similarity index 66% rename from aws_data_tools/client.py rename to aws_data_tools/client/client.py index 12e5098..d69d3ce 100644 --- a/aws_data_tools/client.py +++ b/aws_data_tools/client/client.py @@ -2,19 +2,22 @@ Module containing classes that abstract interactions with boto3 sessions and clients """ -from dataclasses import dataclass, field -from typing import Any, Dict, List, Union +from dataclasses import InitVar, dataclass, field +import logging +from typing import Any, Union from boto3.session import Session from botocore.client import BaseClient from humps import depascalize, pascalize +logging.getLogger(__name__).addHandler(logging.NullHandler()) + _DEFAULT_PAGINATION_CONFIG = {"MaxItems": 500} @dataclass -class APIClient: +class ApiClient: """ Service client for interacting with named AWS API services. When initialized, it establishes a boto3 session and client for the specified service. Loads @@ -22,9 +25,13 @@ class APIClient: service: str client: BaseClient = field(default=None) - session: Session = field(default_factory=Session) + session: Session = field(default=None) + + # Allow customizing the session + client_kwargs: InitVar[dict[str, Any]] = field(default=None) + session_kwargs: InitVar[dict[str, Any]] = field(default=None) - def api(self, func: str, **kwargs) -> Union[Dict[str, Any], List[Dict[str, Any]]]: + def api(self, func: str, **kwargs) -> Union[dict[str, Any], list[dict[str, Any]]]: """ Call a named API action by string. All arguments to the action should be passed as kwargs. The returned data has keys normalized to snake_case. 
Similarly, all @@ -51,6 +58,16 @@ def api(self, func: str, **kwargs) -> Union[Dict[str, Any], List[Dict[str, Any]] response = getattr(self.client, func)(**kwargs) return depascalize(response) - def __post_init__(self): + def __post_init__(self, client_kwargs, session_kwargs): # pragma: no cover + if client_kwargs is None: + client_kwargs = {} + if session_kwargs is None: + session_kwargs = {} + if self.session is None: + self.session = Session(**session_kwargs) if self.client is None: - self.client = self.session.client(self.service) + self.client = self.session.client(self.service, **client_kwargs) + + +# Support old naming +APIClient = ApiClient diff --git a/aws_data_tools/client/tests/test_client.py b/aws_data_tools/client/tests/test_client.py new file mode 100644 index 0000000..a6d39f0 --- /dev/null +++ b/aws_data_tools/client/tests/test_client.py @@ -0,0 +1,25 @@ +from aws_data_tools.client import APIClient # noqa: F401 + + +class TestAPIClient: + """Test the APIClient class""" + + def test_api(self): + """Test API calls with the client""" + assert "pass" == "pass" + + def test_init_with_client(self): + """Test initializing an APIClient with a custom botocore client being passed""" + assert "pass" == "pass" + + def test_init_with_client_kwargs(self): + """Test APIClient init with kwargs for the botocore client""" + assert "pass" == "pass" + + def test_init_with_session(self): + """Test initializing an APIClient with a custom botocore session being passed""" + assert "pass" == "pass" + + def test_init_with_session_kwargs(self): + """Test APIClient init with kwargs for the botocore session""" + assert "pass" == "pass" diff --git a/aws_data_tools/conftest.py b/aws_data_tools/conftest.py new file mode 100644 index 0000000..5eb788c --- /dev/null +++ b/aws_data_tools/conftest.py @@ -0,0 +1,30 @@ +import os +from pathlib import Path + +import pytest + + +FIXTURES_PATH = Path(__file__).parent.absolute() / "fixtures" + + +@pytest.fixture(scope="session") +def apiclient_client_kwargs(): + return dict(endpoint_url="http://localhost:5000") + + +@pytest.fixture(scope="session") +def apiclient_session_kwargs(): + return dict( + aws_access_key_id="testing", + aws_secret_access_key="testing", + aws_session_token="testing", + region_name="us-east-1", + ) + + +@pytest.fixture(scope="session") +def aws_credentials(): + os.environ["AWS_ACCESS_KEY_ID"] = "testing" + os.environ["AWS_SECRET_ACCESS_KEY"] = "testing" + os.environ["AWS_SECURITY_TOKEN"] = "testing" + os.environ["AWS_SESSION_TOKEN"] = "testing" diff --git a/aws_data_tools/fixtures/account_paths.txt b/aws_data_tools/fixtures/account_paths.txt new file mode 100644 index 0000000..7c72625 --- /dev/null +++ b/aws_data_tools/fixtures/account_paths.txt @@ -0,0 +1,33 @@ +/acmeinc-not-in-an-ou +/acmeinc-also-not-in-an-ou +/acmeinc-forgotten-child +/GrumpySysadmins/Services/Corp Fileshares/Prod/acmeinc-hr-fileshares-prod +/GrumpySysadmins/Services/Corp Fileshares/Prod/acmeinc-engineering-fileshares-prod +/GrumpySysadmins/Services/Corp Fileshares/Test/acmeinc-engineering-fileshares-test +/GrumpySysadmins/Services/Corp Fileshares/Test/acmeinc-hr-fileshares-test +/GrumpySysadmins/Services/Atlassian/Prod/acmeinc-jira-prod +/GrumpySysadmins/Services/Atlassian/Test/acmeinc-jira-test +/GrumpySysadmins/Services/Atlassian/Prod/acmeinc-bitbucket-prod +/GrumpySysadmins/Services/Atlassian/Test/acmeinc-bitbucket-test +/GrumpySysadmins/Services/Atlassian/Prod/acmeinc-crowd-prod +/GrumpySysadmins/Services/Atlassian/Test/acmeinc-crowd-test 
+/GrumpySysadmins/Services/Atlassian/Prod/acmeinc-confluence-prod +/GrumpySysadmins/Services/Atlassian/Test/acmeinc-confluence-test +/GrumpySysadmins/Services/Atlassian/Prod/acmeinc-confluence-prod +/GrumpySysadmins/Services/Atlassian/Test/acmeinc-confluence-test +/GrumpySysadmins/Services/LegacyApp/Prod/acmeinc-legacyapp +/Large BU/Logging/Dev/acmeinc-elasticsearch-dev +/Large BU/Logging/Prod/acmeinc-elasticsearch-prod +/Large BU/Logging/QA/acmeinc-elasticsearch-qa +/Large BU/Logging/Staging/acmeinc-elasticsearch-staging +/Large BU/Website/Backend/Dev/acmeinc-large-bu-website-backend-dev +/Large BU/Website/Backend/Prod/acmeinc-large-bu-website-backend-prod +/Large BU/Website/Frontend/Dev/acmeinc-large-bu-website-frontend-dev +/Large BU/Website/Frontend/Prod/acmeinc-large-bu-website-frontend-prod +/Oracle Admins/dev/acmeinc-oracle-admins-db-dev +/Oracle Admins/prd/acmeinc-oracle-admins-db-prd +/Oracle Admins/prdtst/acmeinc-oracle-admins-db-prdtst +/Super Cool BU/Groundbreaking Service/Dev/acmeinc-super-cool-bu-groundbreaking-service-dev +/Super Cool BU/Groundbreaking Service/Prod/acmeinc-super-cool-bu-groundbreaking-service-prod +/Super Cool BU/Testing/acmeinc-super-cool-bu-testing +/YOLO BU/YOLO Service/Production/acmeinc-yolo-service-production diff --git a/aws_data_tools/fixtures/dynamodb_item.json b/aws_data_tools/fixtures/dynamodb_item.json new file mode 100644 index 0000000..c47307d --- /dev/null +++ b/aws_data_tools/fixtures/dynamodb_item.json @@ -0,0 +1,28 @@ +{ + "id": "123456789012", + "name": "A test item", + "test_number": 1, + "test_list": ["foo", "bar", "baz"], + "test_map": { + "test_string": "A test string", + "test_number": 5 + }, + "test_list_of_objects": [ + { + "foo": "bar", + "biz": "baz" + }, + { + "foo": "bar", + "biz": "baz" + } + ], + "test_nested_map": { + "id": { + "test_map": { + "is_nested": true + }, + "test_set": ["just", "a", "set", "with", 2, "mixed", "types"] + } + } +} diff --git a/aws_data_tools/fixtures/dynamodb_item_serialized.json b/aws_data_tools/fixtures/dynamodb_item_serialized.json new file mode 100644 index 0000000..d45f139 --- /dev/null +++ b/aws_data_tools/fixtures/dynamodb_item_serialized.json @@ -0,0 +1,98 @@ +{ + "id": { + "S": "123456789012" + }, + "name": { + "S": "A test item" + }, + "test_number": { + "N": "1" + }, + "test_list": { + "L": [ + { + "S": "foo" + }, + { + "S": "bar" + }, + { + "S": "baz" + } + ] + }, + "test_map": { + "M": { + "test_string": { + "S": "A test string" + }, + "test_number": { + "N": "5" + } + } + }, + "test_list_of_objects": { + "L": [ + { + "M": { + "foo": { + "S": "bar" + }, + "biz": { + "S": "baz" + } + } + }, + { + "M": { + "foo": { + "S": "bar" + }, + "biz": { + "S": "baz" + } + } + } + ] + }, + "test_nested_map": { + "M": { + "id": { + "M": { + "test_map": { + "M": { + "is_nested": { + "BOOL": true + } + } + }, + "test_set": { + "L": [ + { + "S": "just" + }, + { + "S": "a" + }, + { + "S": "set" + }, + { + "S": "with" + }, + { + "N": "2" + }, + { + "S": "mixed" + }, + { + "S": "types" + } + ] + } + } + } + } + } +} diff --git a/aws_data_tools/fixtures/ou_paths.txt b/aws_data_tools/fixtures/ou_paths.txt new file mode 100644 index 0000000..421998c --- /dev/null +++ b/aws_data_tools/fixtures/ou_paths.txt @@ -0,0 +1,36 @@ +/GrumpySysadmins +/GrumpySysadmins/Services +/GrumpySysadmins/Services/Corp Fileshares +/GrumpySysadmins/Services/Corp Fileshares/Prod +/GrumpySysadmins/Services/Corp Fileshares/Test +/GrumpySysadmins/Services/Atlassian +/GrumpySysadmins/Services/Atlassian/Prod 
+/GrumpySysadmins/Services/Atlassian/Test +/GrumpySysadmins/Services/LegacyApp +/GrumpySysadmins/Services/LegacyApp/Prod +/Large BU +/Large BU/Logging +/Large BU/Logging/Dev +/Large BU/Logging/Prod +/Large BU/Logging/QA +/Large BU/Logging/Staging +/Large BU/Website +/Large BU/Website/Backend +/Large BU/Website/Backend/Dev +/Large BU/Website/Backend/Prod +/Large BU/Website/Frontend +/Large BU/Website/Frontend/Dev +/Large BU/Website/Frontend/Prod +/Large BU/Website/Frontend/QA +/Oracle Admins +/Oracle Admins/dev +/Oracle Admins/prd +/Oracle Admins/prdtst +/Super Cool BU +/Super Cool BU/Groundbreaking Service +/Super Cool BU/Groundbreaking Service/Dev +/Super Cool BU/Groundbreaking Service/Prod +/Super Cool BU/Testing +/YOLO BU +/YOLO BU/YOLO Service +/YOLO BU/YOLO Service/Production diff --git a/aws_data_tools/models/__init__.py b/aws_data_tools/models/__init__.py index 5546b6a..cddddcc 100644 --- a/aws_data_tools/models/__init__.py +++ b/aws_data_tools/models/__init__.py @@ -1,3 +1,12 @@ """ Package containing dataclass representations of AWS API data """ +# flake8: noqa: F401 + +from . import ( + base, + config, + organizations, + sns, + sqs, +) diff --git a/aws_data_tools/models/base.py b/aws_data_tools/models/base.py index bce8408..1e08f9f 100644 --- a/aws_data_tools/models/base.py +++ b/aws_data_tools/models/base.py @@ -3,21 +3,31 @@ """ from dataclasses import asdict, dataclass -from json import dumps as json_dumps -from typing import Any, Dict, List, Union +import json +import logging +from typing import Any, Union -from yaml import dump as yaml_dump +from dacite import from_dict +from humps import decamelize, depascalize +import yaml -from ..utils import serialize_dynamodb_item, serialize_dynamodb_items +from ..utils.dynamodb import serialize_dynamodb_item, serialize_dynamodb_items + +logging.getLogger(__name__).addHandler(logging.NullHandler()) @dataclass class ModelBase: """Base class for all models with helpers for serialization""" + @classmethod + def from_dict(cls, data: dict[str, Any]): + """Initialize the model from a dictionary""" + return from_dict(data_class=cls, data=decamelize(depascalize(data))) + def to_dict( - self, field_name: str = None - ) -> Union[Dict[str, Any], List[Dict[str, Any]]]: + self, field_name: str = None, flatten: bool = False + ) -> Union[dict[str, Any], list[dict[str, Any]]]: # pragma: no cover """ Serialize the dataclass instance to a dict, or serialize a single field. If the field is a collection, it is returned as such. 
If the field is a simple type, @@ -26,23 +36,52 @@ def to_dict( data = {k: v for k, v in asdict(self).items() if not k.startswith("_")} if field_name is not None: if field_name in data.keys(): - if type(data[field_name]) in [dict, list]: - return data[field_name] + if isinstance(data[field_name], (dict, list)): + return self.data[field_name] return {field_name: data[field_name]} raise Exception(f"Field {field_name} does not exist") return data - def to_dynamodb(self, **kwargs) -> Union[Dict[str, Any], List[Dict[str, Any]]]: + def to_list(self, **kwargs) -> list[dict[str, Any]]: + """Serialize the dataclass instance to a list of dicts (alias for to_dict)""" + data = self.to_dict(**kwargs) + if not isinstance(data, list): + raise Exception("Class or field is not a list") + return data + + def to_dynamodb( + self, **kwargs + ) -> Union[dict[str, Any], list[dict[str, Any]]]: # pragma: no cover """Serialize the dataclass or field to a DynamoDB Item or list of Items""" data = self.to_dict(**kwargs) if isinstance(data, list): return serialize_dynamodb_items(items=data) return serialize_dynamodb_item(item=data) - def to_json(self, **kwargs) -> str: - """Serialize the dataclass instance to JSON""" - return json_dumps(self.to_dict(**kwargs), default=str) + def to_json(self, escape: bool = False, **kwargs) -> str: # pragma: no cover + """Serialize the dataclass instance to a JSON string""" + data = json.dumps(self.to_dict(**kwargs), default=str) + if escape: + return data.replace('"', '"').replace("\n", "\\n") + return data + + @classmethod + def from_json(cls, s: str, **kwargs) -> Any: # pragma: no cover + """Deserialize the JSON string to an instance of the dataclass""" + # Try to remove any escape characters from the string based on the assumption + # that it could be an escape characters + return cls.from_dict(json.loads(s.replace('\\"', '"').replace("\\n", "\n"))) + + def to_yaml(self, escape: bool = False, **kwargs) -> str: # pragma: no cover + """Serialize the dataclass instance to a YAML string""" + data = yaml.dump(self.to_dict(**kwargs)) + if escape: + return data.replace('"', '"').replace("\n", "\\n") + return data - def to_yaml(self, **kwargs) -> str: - """Serialize the dataclass instance to YAML""" - return yaml_dump(self.to_dict(**kwargs)) + @classmethod + def from_yaml(cls, s: str, **kwargs) -> Any: # pragma: no cover + """Deserialize the YAML string to an instance of the dataclass""" + # Try to remove any escape characters from the string based on the assumption + # that it could have escape characters + return cls.from_dict(yaml.safe_load(s.replace('\\"', '"'))) diff --git a/aws_data_tools/models/config.py b/aws_data_tools/models/config.py new file mode 100644 index 0000000..0e8ac9d --- /dev/null +++ b/aws_data_tools/models/config.py @@ -0,0 +1,297 @@ +""" +Classes and utilities for working with notification from AWS Config. This is a work +in progress. 
+ +See the documentation for the structure of various notifications: +https://docs.aws.amazon.com/config/latest/developerguide/notifications-for-AWS-Config.html # noqa +""" + +from dataclasses import dataclass, field +import logging +from typing import Any, Optional, Union + +from .base import ModelBase + +logging.getLogger(__name__).addHandler(logging.NullHandler()) + + +@dataclass +class SnapshotDeliveryStartedNotification(ModelBase): + """Notification sent when a config snapshot delivery is started""" + + message_type: str # Should be "ConfigurationSnapshotDeliveryStarted" + notification_creation_time: str + record_version: str + + config_snapshot_id: Optional[str] + + +@dataclass +class SnapshotDeliveryCompletedNotification(ModelBase): + """Notification sent when a config snapshot delivery is completed""" + + message_type: str # Should be "ConfigurationSnapshotDeliveryCompleted" + notification_creation_time: str + record_version: str + s3_bucket: str + s3_object_key: str + + config_snapshot_id: Optional[str] + + +@dataclass +class HistoryDeliveryStartedNotification(ModelBase): + """Notification sent when config history delivery is started""" + + message_type: str # Should be "ConfigurationHistoryDeliveryStarted" + notification_creation_time: str + record_version: str + + config_snapshot_id: Optional[str] + + +@dataclass +class HistoryDeliveryCompletedNotification(ModelBase): + """Notification sent when config history delivery is completed""" + + message_type: str # Should be "ConfigurationHistoryDeliveryCompleted" + notification_creation_time: str + record_version: str + s3_bucket: str + s3_object_key: str + + config_snapshot_id: Optional[str] + + +@dataclass +class ConfigurationItemRelationshipItem(ModelBase): + """Represents a resource that can be related to a configuration item""" + + name: str + resource_id: str + resource_type: str + resource_name: str + + +@dataclass +class ConfigurationItem(ModelBase): + """Configuration state and relationships for a resource""" + + arn: str + availability_zone: str + aws_account_id: str + configuration: dict[str, Any] + configuration_item_capture_time: str + configuration_item_status: str + configuration_item_version: str + configuration_state_id: str + related_events: list[Any] + relationships: list[ConfigurationItemRelationshipItem] + resource_creation_time: str + resource_id: str + resource_type: str + tags: dict[str, str] + + # TODO: Some examples don't show this field + configuration_state_md5_hash: str = field(default=None) + + # TODO: Some examples don't show this field. Unsure if type is consistent. + # Real-life item change events have a dict of dicts. 
+ supplementary_configuration: dict[str, Any] = field(default_factory=dict) + + +@dataclass +class ConfigurationItemDiffChangedProperty(ModelBase): + """Represents a changed property of a configuration item""" + + change_type: str + previous_value: Optional[Union[int, str, dict[str, Any]]] = field(default=None) + updated_value: Optional[Union[int, str, dict[str, Any]]] = field(default=None) + + +@dataclass +class ConfigurationItemDiff(ModelBase): + """Configuration state and relationships for a resource""" + + change_type: str + changed_properties: dict[str, ConfigurationItemDiffChangedProperty] + + +@dataclass +class ItemChangeNotification(ModelBase): + """Notification sent when configuration has changed for a resource""" + + configuration_item: dict[str, Any] + message_type: str # Should be "ConfigurationItemChangeNotification" + + configuration_item_diff: Optional[ConfigurationItemDiff] + notification_creation_time: Optional[str] + record_version: Optional[str] + + # # TODO: Some examples in the docs say "ConfigurationItem" and others say + # # "ConfigurationItems". For now we'll add both options and mark them as optional. + # configuration_items: list[dict[str, Any]] = field(default_factory=list) + # configuration_item: dict[str, Any] = field(default_factory=dict) + # + # Real-life item change events show "configuration_item" + + +@dataclass +class ComplianceEvaluationResultQualifier(ModelBase): + """Unique qualifiers for an evaluated resource and the name of the rule""" + + config_rule_name: str + resource_type: str + resource_id: str + + +@dataclass +class ComplianceEvaluationResultIdentifier(ModelBase): + """ + Details about a compliance rule evaluatation against a resource, including an + ordering timestamp + """ + + evaluation_result_qualifier: ComplianceEvaluationResultQualifier + ordering_timestamp: str + + +@dataclass +class ComplianceEvaluationResult(ModelBase): + """The result of a rule compliance evaluation""" + + evaluation_result_identifier: ComplianceEvaluationResultIdentifier + compliance_type: str + result_recorded_time: str + config_rule_invoked_time: str + + # Don't know the expected type of either of these. The examples only show "null" + # for the values. Going to mark annotation as Any type and result_token as string. + # TODO: Verify the types when not null. + annotation: Optional[Any] = field(default=None) + result_token: Optional[str] = field(default=None) + + +@dataclass +class ComplianceChangeNotification(ModelBase): + """Notification sent when the compliance status for a resource has changed""" + + aws_account_id: str + config_rule_name: str + config_rule_arn: str + resource_type: str + resource_id: str + aws_region: str + new_evaluation_result: ComplianceEvaluationResult + old_evaluation_result: ComplianceEvaluationResult + notification_creation_time: str + message_type: str # ComplianceChangeNotification + record_version: str + + # m.new_evaluation_result.compliance_type will equal COMPLIANT or NON_CONPLIANT + + +# TODO: It seems there is a "ScheduledEvaluation" event that is created when Config +# triggers a scheduled rule evaluation (periodic). It is sent to the Lambda during +# invocation. +# +# https://docs.aws.amazon.com/config/latest/developerguide/evaluate-config_develop-rules_example-events.html#periodic-example-event + + +@dataclass +class ConfigRulesEvaluationStartedNotification(ModelBase): + """Notification sent when a rule evaluation has started""" + + # TODO: Are there evaluation finished notifications? 
+
+    aws_account_id: str
+    aws_region: str
+    config_rule_names: list[str]
+    notification_creation_time: str
+    message_type: str  # ConfigRulesEvaluationStarted
+    record_version: str
+
+
+@dataclass
+class OversizedConfigurationItemSummary(ModelBase):
+    """
+    The subset of configuration details provided by an oversized change notification
+    for a resource
+    """
+
+    arn: str
+    aws_account_id: str
+    aws_region: str
+    change_type: str
+    configuration_item_capture_time: str
+    configuration_item_status: str
+    configuration_item_version: str
+    configuration_state_id: int
+    configuration_state_md5_hash: str
+    resource_creation_time: str
+    resource_id: str
+    resource_type: str
+
+    availability_zone: str = field(default=None)
+    resource_name: str = field(default=None)
+
+
+@dataclass
+class OversizedConfigurationItemChangeS3DeliverySummary(ModelBase):
+    """Details about where an oversized configuration change was delivered in S3"""
+
+    # The bucket location is null if there was a delivery error
+    s3_bucket_location: str = field(default=None)
+    error_code: str = field(default=None)
+    error_message: str = field(default=None)
+
+
+@dataclass
+class OversizedConfigurationItemChangeNotification(ModelBase):
+    """Notification sent when a configuration change is too large for SNS"""
+
+    change_summary: OversizedConfigurationItemSummary
+    s3_delivery_summary: OversizedConfigurationItemChangeS3DeliverySummary
+
+
+@dataclass
+class DeliveryFailedNotification(ModelBase):
+    """
+    Notification sent when a config snapshot or oversized config item change can't be
+    delivered to S3
+
+    https://docs.aws.amazon.com/config/latest/developerguide/notification-delivery-failed.html  # noqa
+    """
+
+    # TODO: There is only one documented example that shows a failed delivery for an
+    # oversized configuration item change. It is the same as the oversized change
+    # notification, except the S3 delivery summary has the error code and message fields
+    # populated, with a null value for the S3 bucket location.
+    #
+    # I'm unsure what the notification would look like for a snapshot delivery failure.
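    # An illustrative routing sketch: a parsed notification dict can be dispatched to
    # one of these models with the MESSAGE_TYPE_MAP / get_model() helpers defined just
    # below (the payload value here is hypothetical):
    #
    #   payload: dict = ...  # e.g. parsed from the "Message" field of an SNS record
    #   model = get_model(payload["messageType"])
    #   notification = model.from_dict(payload)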
+ pass + + +# Maps the value for the "messageType" field to the corresponding model +MESSAGE_TYPE_MAP = { + "ConfigurationSnapshotDeliveryStarted": SnapshotDeliveryStartedNotification, + "ConfigurationSnapshotDeliveryCompleted": SnapshotDeliveryCompletedNotification, + "ConfigurationHistoryDeliveryStarted": HistoryDeliveryStartedNotification, + "ConfigurationHistoryDeliveryCompleted": HistoryDeliveryCompletedNotification, + "ConfigurationItemChangeNotification": ItemChangeNotification, + "ComplianceChangeNotification": ComplianceChangeNotification, + "ConfigRulesEvaluationStarted": ConfigRulesEvaluationStartedNotification, + "OversizedConfigurationItemChangeNotification": OversizedConfigurationItemChangeNotification, # noqa + "DeliveryFailedNotification": DeliveryFailedNotification, +} + + +def get_model(message_type: str) -> Any: + """ + Takes a message type string and returns the model class based on the above + mapping + """ + model = MESSAGE_TYPE_MAP.get(message_type) + if model is None: + raise Exception(f"Model not found for message type {message_type}") + return model diff --git a/aws_data_tools/models/organizations.py b/aws_data_tools/models/organizations.py index 4517e6c..d24a928 100644 --- a/aws_data_tools/models/organizations.py +++ b/aws_data_tools/models/organizations.py @@ -1,12 +1,25 @@ """ -Dataclass models for working with AWS Organizations APIs +Dataclass builders and models for working with AWS Organizations APIs """ -from dataclasses import dataclass, field -from typing import Dict, List +from datetime import datetime +from dataclasses import dataclass, field, InitVar +import logging +from typing import Any, Union +# Make this an optional dependency and handle import failure +import graphviz + +from ..client import APIClient +from ..utils.tags import query_tags from .base import ModelBase +logging.getLogger(__name__).addHandler(logging.NullHandler()) + + +_SERVICE_NAME = "organizations" +_OU_MAXDEPTH = 5 + @dataclass class ParChild(ModelBase): @@ -15,6 +28,43 @@ class ParChild(ModelBase): id: str type: str + _valid_types = ["ACCOUNT", "ORGANIZATIONAL_UNIT", "ROOT"] + + def __post_init__(self): + if self.type not in self._valid_types: + raise Exception( + f"Invalid type {self.type}. Valid types: {self._valid_types}." + ) + + +_VALID_POLICY_TYPES = [ + "AISERVICES_OPT_OUT_POLICY", + "BACKUP_POLICY", + "SERVICE_CONTROL_POLICY", + "TAG_POLICY", +] + +# Effective policies +_VALID_EFFECTIVE_POLICY_TYPES = [ + policy_type + for policy_type in _VALID_POLICY_TYPES + if policy_type != "SERVICE_CONTROL_POLICY" +] + + +def get_valid_policy_types(effective: bool = True) -> list[str]: + if effective: + return _VALID_EFFECTIVE_POLICY_TYPES + return _VALID_POLICY_TYPES + + +def get_valid_effective_policy_types() -> list[str]: + return get_valid_policy_types(effective=True) + + +class InvalidEffectivePolicyType(TypeError): + pass + @dataclass class EffectivePolicy(ModelBase): @@ -25,6 +75,94 @@ class EffectivePolicy(ModelBase): target_id: str policy_type: str + _valid_policy_types = [ + "AISERVICES_OPT_OUT_POLICY", + "BACKUP_POLICY", + "TAG_POLICY", + ] + + @classmethod + def __ensure_valid_policy_type( + cls, p_type: str + ) -> Union[None, InvalidEffectivePolicyType]: + """Validate the policy_type field""" + valid_policy_types = cls._valid_policy_types + if p_type in valid_policy_types: + return + raise InvalidEffectivePolicyType( + f'Invalid type {p_type}. 
Valid values are {", ".join(valid_policy_types)}' + ) + + @classmethod + def fetch_data( + cls, policy_type: str, target_id: str, client: APIClient, **kwargs + ) -> dict[str]: + """Return raw dict from DescribeEffectivePolicy API call""" + cls.__ensure_valid_policy_type(policy_type) + data = client.api( + "describe_effective_policy", policy_type=policy_type, target_id=target_id + ) + return data.get("effective_policy") + + @classmethod + def from_api(cls, **kwargs): + """Return an EffectivePolicy instance from API calls""" + data = cls.fetch_data(**kwargs) + return cls.from_dict(data) + + def __post_init__(self): + self.__ensure_valid_policy_type(self.policy_type) + + +@dataclass +class EffectivePolicies(ModelBase): + """A collection of effective policies""" + + policies: list[EffectivePolicy] + + policy_types: list[str] + target_id: str + + _policy_types: list[str] = field(init=False, repr=False) + _target_id: InitVar[str] = field(init=False, repr=False) + + @property + def policy_types(self) -> list[str]: + return self._policy_types + + @policy_types.setter + def policy_types(self, policy_types: list[str]) -> None: + self._policy_types = policy_types + + @property + def target_id(self) -> str: + return self._target_id + + @target_id.setter + def target_id(self, target_id: str) -> None: + self._target_id = target_id + + def fetch_data(self, policy_types: list[str], target_id: str) -> dict[str, str]: + policies = [] + for policy_type in self.policy_types: + data = self.api( + "describe_effective_policy", + policy_type=policy_type, + target_id=self.target_id, + ) + policies.append(EffectivePolicy.from_dict(data)) + self.policies = policies + + @classmethod + def from_api(cls, target_id: str, policy_types: list[str] = None): + raise NotImplementedError + # if policy_types is None: + # policy_types = get_valid_effective_policy_types() + # effective_policy = cls(policy_types=policy_types, target_id=target_id) + + # klass = cls(policy_types=policy_types) + # self.fetch() + @dataclass class PolicySummary(ModelBase): @@ -32,11 +170,25 @@ class PolicySummary(ModelBase): arn: str aws_managed: bool - description: str id: str name: str type: str + description: str = field(default=None) + + _valid_types = [ + "AISERVICES_OPT_OUT_POLICY", + "BACKUP_POLICY", + "SERVICE_CONTROL_POLICY", + "TAG_POLICY", + ] + + def __post_init__(self): + if self.type not in self._valid_types: + raise Exception( + f"Invalid type {self.type}. Valid types: {self._valid_types}." + ) + @dataclass class PolicySummaryForTarget(ModelBase): @@ -48,13 +200,25 @@ class PolicySummaryForTarget(ModelBase): @dataclass class PolicyTargetSummary(ModelBase): - """A summary of a target attached to a policy""" + """A summary of a target attached to a policy. Returned by ListPoliciesForTarget""" arn: str name: str target_id: str type: str + _valid_types = [ + "ACCOUNT", + "ORGANIZATIONAL_UNIT", + "ROOT", + ] + + def __post_init__(self): + if self.type not in self._valid_types: + raise Exception( + f"Invalid type {self.type}. Valid types: {self._valid_types}." + ) + @dataclass class PolicyTypeSummary(ModelBase): @@ -71,18 +235,13 @@ class Policy(ModelBase): policy_summary: PolicySummary # We allow content to be None because ListPolicies doesn't return the content data. - # Instead you have to DescribePolicie to get the content. + # Instead you have to DescribePolicy to get the content. Listing policies generally + # needs to be done first to get the IDs. 
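    # An illustrative two-step fetch (mirroring OrganizationDataBuilder.__e_policies
    # further down in this module), where client is an APIClient instance:
    #
    #   summaries = client.api("list_policies", filter="SERVICE_CONTROL_POLICY")
    #   detail = client.api("describe_policy", policy_id=summaries[0]["id"]).get("policy")
    #   policy = Policy.from_dict(detail)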
content: str = field(default=None) # Optional properties generally populated after initialization - tags: Dict[str, str] = field(default=None) - targets: List[PolicyTargetSummary] = field(default=None) - - def to_target(self): - """Return the Policy as a PolicySummaryForTarget object""" - return PolicySummaryForTarget( - id=self.policy_summary.id, type=self.policy_summary.type - ) + tags: dict[str, str] = field(default=None) + targets: list[PolicyTargetSummary] = field(default=None) @dataclass @@ -92,19 +251,46 @@ class Root(ModelBase): arn: str id: str name: str - policy_types: List[PolicyTypeSummary] + policy_types: list[PolicyTypeSummary] # Optional properties generally populated after initialization - children: List[ParChild] = field(default=None) - policies: List[PolicySummaryForTarget] = field(default=None) + children: list[ParChild] = field(default=None) + policies: list[PolicySummaryForTarget] = field(default=None) - def to_parchild_dict(self) -> Dict[str, str]: + def to_parchild_dict(self) -> dict[str, str]: """Return the root as a ParChild (parent) dict""" return {"id": self.id, "type": "ROOT"} def to_parchild(self) -> ParChild: """Return the root as a ParChild (parent) object""" - return ParChild(**self.to_parchild_dict()) + return ParChild.from_dict(self.to_parchild_dict()) + + def fetch_tags(self) -> None: + raise NotImplementedError + # data = query_tags(client=AddApiClientHere, resource_id=self.id) + # return data + + def fetch_policies(self) -> None: + raise NotImplementedError + + def fetch_child_ous(self) -> None: + raise NotImplementedError + + def fetch_child_accounts(self) -> None: + raise NotImplementedError + + def fetch_children(self) -> None: + raise NotImplementedError + + def fetch(self) -> None: + raise NotImplementedError + + def fetch_all(self) -> None: + raise NotImplementedError + + @classmethod + def from_api(cls): + raise NotImplementedError @dataclass @@ -116,18 +302,18 @@ class OrganizationalUnit(ModelBase): name: str # Optional properties generally populated after initialization - children: List[ParChild] = field(default=None) + children: list[ParChild] = field(default=None) parent: ParChild = field(default=None) - policies: List[PolicySummaryForTarget] = field(default=None) - tags: Dict[str, str] = field(default=None) + policies: list[PolicySummaryForTarget] = field(default=None) + tags: dict[str, str] = field(default=None) - def to_parchild_dict(self) -> Dict[str, str]: + def to_parchild_dict(self) -> dict[str, str]: """Return the OU as a ParChild (parent) dict""" return {"id": self.id, "type": "ORGANIZATIONAL_UNIT"} def to_parchild(self) -> ParChild: """Return the OU as a ParChild (parent) object""" - return ParChild(**self.to_parchild_dict()) + return ParChild.from_dict(self.to_parchild_dict()) @dataclass @@ -137,64 +323,615 @@ class Account(ModelBase): arn: str email: str id: str - joined_timestamp: str + joined_timestamp: datetime name: str joined_method: str status: str # Optional properties generally populated after initialization - effective_policies: List[EffectivePolicy] = field(default=None) + effective_policies: list[EffectivePolicy] = field(default=None) parent: ParChild = field(default=None) - policies: List[PolicySummaryForTarget] = field(default=None) - tags: Dict[str, str] = field(default=None) + policies: list[PolicySummaryForTarget] = field(default=None) + tags: dict[str, str] = field(default=None) - def to_parchild_dict(self) -> Dict[str, str]: + def to_parchild_dict(self) -> dict[str, str]: """Return the account as a ParChild 
(parent) dict""" return {"id": self.id, "type": "ACCOUNT"} def to_parchild(self) -> ParChild: """Return the account as a ParChild (parent) object""" - return ParChild(**self.to_parchild_dict()) + return ParChild.from_dict(self.to_parchild_dict()) @dataclass class Organization(ModelBase): """Represents an organization and all it's nodes and edges""" - # We allow all these fields to default to None so we can support initializing an - # organization object with empty data. - arn: str = field(default=None) - available_policy_types: List[PolicyTypeSummary] = field(default=None) - feature_set: str = field(default=None) - id: str = field(default=None) - master_account_arn: str = field(default=None) - master_account_email: str = field(default=None) - master_account_id: str = field(default=None) - - # Optional properties generally populated after initialization + arn: str + available_policy_types: list[PolicyTypeSummary] + feature_set: str + id: str + master_account_arn: str + master_account_email: str + master_account_id: str # TODO: These collections should be converted to container data classes to be able # to better able to handle operations against specific fields. Currently, # serializing/deserializing these collections indepently requires passing the # "field_name" kwarg to the `to_dict()` function from ModelBase. It's already # getting hacky. - accounts: List[Account] = field(default=None) - organizational_units: List[OrganizationalUnit] = field(default=None) - policies: List[Policy] = field(default=None) + accounts: list[Account] = field(default=None) + organizational_units: list[OrganizationalUnit] = field(default=None) + policies: list[Policy] = field(default=None) root: Root = field(default=None) # Mappings that represent node -> edge relationships in the organization - _parent_child_tree: Dict[str, ParChild] = field( + _parent_child_tree: dict[str, ParChild] = field( default=None, init=False, repr=False ) - _child_parent_tree: Dict[str, ParChild] = field( + _child_parent_tree: dict[str, ParChild] = field( default=None, init=False, repr=False ) - _policy_target_tree: Dict[str, ParChild] = field( + _policy_target_tree: dict[str, ParChild] = field( default=None, init=False, repr=False ) # Mappings that hold a reference to the index of each node in a list - _account_index_map: Dict[str, int] = field(default=None, init=False, repr=False) - _ou_index_map: Dict[str, int] = field(default=None, init=False, repr=False) - _policy_index_map: Dict[str, int] = field(default=None, init=False, repr=False) + _account_index_map: dict[str, int] = field(default=None, init=False, repr=False) + _ou_index_map: dict[str, int] = field(default=None, init=False, repr=False) + _policy_index_map: dict[str, int] = field(default=None, init=False, repr=False) + + def fetch_description(self, include_policies: bool = True) -> None: + org = self.api("describe_organization").get("organization") + root = self.api("list_roots")[0] + policies = {} + if include_policies: + responses = [] + for policy_type in _VALID_POLICY_TYPES: + response = self.api("list_policies", filter=policy_type) + responses.append(response) + policies["policies"] = [ + {"policy_summary": policy_summary} for policy_summary in responses + ] + return Organization.from_dict({**org, **root, **policies}) + + @classmethod + def from_api(include_policies: bool = True): + """Initialize the organization object from the Organizations API(s)""" + raise NotImplementedError + # self.fetch_description() + + def to_dot(self) -> str: + """Return the organization as a 
GraphViz DOT diagram""" + graph = graphviz.Digraph("Organization", filename="organization.dot") + nodes = [] + nodes.append(self.root) + nodes.extend(self.organizational_units) + nodes.extend(self.accounts) + for node in nodes: + if getattr(node, "parent", None) is None: + continue + shape = None + if isinstance(node, Root): + shape = "circle" + elif isinstance(node, OrganizationalUnit): + shape = "box" + elif isinstance(node, Account): + shape = "ellipse" + else: + continue + graph.node(node.id, label=node.name, shape=shape) + graph.edge(node.parent.id, node.id) + return graphviz.unflatten( + graph.source, + stagger=10, + fanout=10, + chain=10, + ) + + def __post_init__(self) -> None: + # fetch desc + pass + + +@dataclass +class OrganizationDataBuilder(ModelBase): + """ + Performs read-only operations against the Organizations APIs to construct data + models of organizations objects. It can populate data for most supported objects: + + - Organization (the org itself) + - Roots + - Organizational Units + - Policies + - Accounts + - Effective Policies + - Tags + + It currently doesn't support getting data about delegated administrators or + services, handshakes, account creation statuses, or AWS service integrations. + + Provides serialization to dicts and JSON. + """ + + client: APIClient = field(default=None, repr=False) + # dm: Organization = field(default_factory=Organization.from_api) + dm: Organization = field(default=None) + + # Used by __post_init__() to determine what data to initialize (default is none) + init_all: InitVar[bool] = field(default=False) + init_connection: InitVar[bool] = field(default=True) + init_organization: InitVar[bool] = field(default=False) + init_policies: InitVar[bool] = field(default=False) + init_policy_tags: InitVar[bool] = field(default=False) + init_ous: InitVar[bool] = field(default=False) + init_ou_tags: InitVar[bool] = field(default=False) + init_accounts: InitVar[bool] = field(default=False) + init_account_tags: InitVar[bool] = field(default=False) + init_policy_targets: InitVar[bool] = field(default=False) + init_effective_policies: InitVar[bool] = field(default=False) + + include_account_parents: bool = field(default=False) + + @property + def enabled_policy_types(self) -> list[str]: + """Enabled policy types in the organization""" + # TODO (@timoguin): Follow up on AWS support request seeking clarification on + # discrepancies between available policy types between Org and Root. + # + # Apparently, you can enable policy types on a root that do not reflect in the + # available policy types for the organization, so DescribeOrganization and + # ListRoots will answer differently. + # + # Pendind clarifications, just return a list of policy types that are enabled + # on the root. 
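        # Illustrative return value when SCPs and tag policies are enabled on the
        # root: ["SERVICE_CONTROL_POLICY", "TAG_POLICY"]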
+ if self.dm is None: + self.fetch_organization() + return [p.type for p in self.dm.root.policy_types if p.status == "ENABLED"] + + def Connect(self): + """Initialize an authenticated session""" + if self.client is None: + self.client = APIClient(_SERVICE_NAME) + + def api(self, func: str, **kwargs) -> Union[list[dict[str, Any]], dict[str, Any]]: + """Make arbitrary API calls with the session client""" + if self.client is None: + self.Connect() + return self.client.api(func, **kwargs) + + # @staticmethod + def fetch_organization(self, include_policies: bool = True) -> None: + """Initialize the organization object from the Organizations API(s)""" + # TODO: Was trying to convert this to a static method + # raise NotImplementedError + org = self.api("describe_organization").get("organization") + root = self.api("list_roots")[0] + policies = {} + if include_policies: + responses = [] + for policy_type in _VALID_POLICY_TYPES: + response = self.api("list_policies", filter=policy_type) + responses.extend(response) + policies["policies"] = [ + {"policy_summary": policy_summary} for policy_summary in responses + ] + root = {"root": root} + self.dm = Organization.from_dict({**org, **root, **policies}) + + def __e_policies(self) -> list[dict[str, Any]]: + """Extract organization policy data from ListPolicies and DescribePolicy""" + ret = [] + for p_type in self.enabled_policy_types: + policies = [] + p_summaries = self.api("list_policies", filter=p_type) + for p_summary in p_summaries: + p_detail = self.api("describe_policy", policy_id=p_summary["id"]).get( + "policy" + ) + policies.append(p_detail) + ret.extend(policies) + return ret + + def __t_policies(self) -> list[Policy]: + """Deserialize list of policy dicts into a list of Policy objects""" + return [Policy.from_dict(policy) for policy in self.__e_policies()] + + def __l_policies(self) -> None: + """Load policy objects into dm.policies field""" + self.dm.policies = self.__t_policies() + if self.dm._policy_index_map is None: + self.dm._policy_index_map = {} + for i, policy in enumerate(self.dm.policies): + self.dm._policy_index_map[policy.policy_summary.id] = i + + def fetch_policies(self) -> None: + """Initialize the list of Policy objects in the organization""" + self.__l_policies() + + def __e_policy_targets_for_id(self, policy_id: str) -> list[PolicyTargetSummary]: + """Extract a list of policy targets from ListTargetsForPolicy""" + return self.api("list_targets_for_policy", policy_id=policy_id) + + def __e_policy_targets(self) -> dict[str, list[dict[str, Any]]]: + """Extract target summary data for all policies""" + ret = {} + if self.dm.policies is None: + self.fetch_policies() + for policy in self.dm.policies: + pid = policy.policy_summary.id + data = self.__e_policy_targets_for_id(policy_id=pid) + for target in data: + if ret.get(pid) is None: + ret[pid] = [] + ret[pid].append(target) + return ret + + def __lookup_obj_index(self, obj_type: str, obj_id: str) -> int: + """Lookup the list index of a type of node in the data model""" + map_field = None + if obj_type == "account": + map_field = self.dm._account_index_map + elif obj_type == "ou" or obj_type == "organizational_unit": + map_field = self.dm._ou_index_map + elif obj_type == "policy": + map_field = self.dm._policy_index_map + return map_field[obj_id] + + def __lookup_account_index(self, account_id: str) -> int: + """Lookup the index of an account object in the dm.accounts field""" + return self.__lookup_obj_index("account", account_id) + + def __lookup_ou_index(self, ou_id: str) 
-> int: + """Lookup the list index of an OU in the dm.organizational_units field""" + return self.__lookup_obj_index("ou", ou_id) + + def __lookup_policy_index(self, policy_id: str) -> int: + """Lookup the list index of an account in dm.accounts""" + return self.__lookup_obj_index("policy", policy_id) + + def __t_policy_targets( + self, + ) -> dict[str, dict[str, list[Union[PolicySummaryForTarget, PolicySummary]]]]: + """ + Deserialize policy targets into a dict of PolicySummaryForTarget and + PolicyTargetSummary objects + """ + data = {} + for pid, p_targets in self.__e_policy_targets().items(): + p_index = self.__lookup_policy_index(pid) + p_type = self.dm.policies[p_index].policy_summary.type + for p_target in p_targets: + p_summary_for_target = PolicySummaryForTarget.from_dict( + {"id": pid, "type": p_type} + ) + if data.get(pid) is None: + data[pid] = { + "policy_index": p_index, + "policy_summary_for_targets": p_summary_for_target, + "target_details": [], + } + data[pid]["target_details"].append( + PolicyTargetSummary.from_dict(p_target) + ) + return data + + def __l_policy_targets(self) -> None: + """Load policy target objects and data into the data model""" + data = self.__t_policy_targets() + for pid, d in data.items(): + p_index = d["policy_index"] + # Update "targets" for Policy objects + self.dm.policies[p_index].targets = d["target_details"] + # Update "policies" for target objects + for target in d["target_details"]: + if target.type == "ROOT": + if self.dm.root.policies is None: + self.dm.root.policies = [] + self.dm.root.policies.append(d["policy_summary_for_targets"]) + elif target.type == "ORGANIZATIONAL_UNIT": + ou_index = self.__lookup_ou_index(target.target_id) + if self.dm.organizational_units[ou_index].policies is None: + self.dm.organizational_units[ou_index].policies = [] + self.dm.organizational_units[ou_index].policies.append( + d["policy_summary_for_targets"] + ) + elif target.type == "ACCOUNT": + acct_index = self.__lookup_account_index(target.target_id) + if self.dm.accounts[acct_index].policies is None: + self.dm.accounts[acct_index].policies = [] + self.dm.accounts[acct_index].policies.append( + d["policy_summary_for_targets"] + ) + + def fetch_policy_targets(self) -> None: + """Initialize the list of Policy objects in the organization""" + self.__l_policy_targets() + + def __e_ous_recurse( + self, + parents: list[ParChild] = None, + ous: list[OrganizationalUnit] = None, + depth: int = 0, + maxdepth: int = _OU_MAXDEPTH, + ) -> list[OrganizationalUnit]: + """Recurse the org tree and return a list of OU dicts""" + if parents is None: + parents = [self.dm.root.to_parchild()] + if self.dm._parent_child_tree is None: + self.dm._parent_child_tree = {} + if self.dm._child_parent_tree is None: + self.dm._child_parent_tree = {} + if self.dm.organizational_units is None: + self.dm.organizational_units = [] + if depth == maxdepth or len(parents) == 0: + return ous + if ous is None: + ous = [] + next_parents = [] + for parent in parents: + if self.dm._parent_child_tree.get(parent.id) is None: + self.dm._parent_child_tree[parent.id] = [] + ou_results = self.api( + "list_organizational_units_for_parent", parent_id=parent.id + ) + for ou_result in ou_results: + ou = OrganizationalUnit.from_dict(ou_result) + ou.parent = parent + ou_to_parchild = ou.to_parchild() + self.dm._parent_child_tree[parent.id].append(ou_to_parchild) + self.dm._child_parent_tree[ou.id] = parent + ous.append(ou) + next_parents.append(ou_to_parchild) + acct_results = 
self.api("list_accounts_for_parent", parent_id=parent.id) + for acct_result in acct_results: + account = Account.from_dict(acct_result) + account.parent = parent + self.dm._parent_child_tree[parent.id].append(account.to_parchild()) + self.dm._child_parent_tree[account.id] = parent + return self.__e_ous_recurse(parents=next_parents, ous=ous, depth=depth + 1) + + def __e_ous(self) -> list[OrganizationalUnit]: + """Extract the OU tree recursively, including OUs and child accounts""" + return self.__e_ous_recurse() + + def __t_ous(self) -> list[OrganizationalUnit]: + """Transform OU objects by populating child relationships""" + data = self.__e_ous() + ous = [] + for ou in data: + ou.children = self.dm._parent_child_tree[ou.id] + ous.append(ou) + return ous + + def __l_ous(self) -> None: + """Load deserialized org tree into data models (root and OUs)""" + ous = self.__t_ous() + self.dm.root.children = self.dm._parent_child_tree[self.dm.root.id] + self.dm.organizational_units = ous + if self.dm._ou_index_map is None: + self.dm._ou_index_map = {} + for i, ou in enumerate(self.dm.organizational_units): + self.dm._ou_index_map[ou.id] = i + + def fetch_ous(self) -> None: + """Recurse the org tree and populate relationship data for nodes""" + self.__l_ous() + + def __e_accounts(self) -> list[dict[str, Any]]: + """Extract the list of accounts in the org""" + return self.api("list_accounts") + + def __t_accounts(self) -> list[Account]: + """Transform account data into a list of Account objects""" + return [Account.from_dict(account) for account in self.__e_accounts()] + + def __l_accounts(self, include_parents: bool = False) -> None: + """Load account objects with parent relationship data""" + data = self.__t_accounts() + accounts = [] + for result in data: + account = result + if include_parents or self.include_account_parents: + if self.dm._child_parent_tree is None: + self.fetch_ous() + account.parent = self.dm._child_parent_tree[account.id] + accounts.append(account) + self.dm.accounts = accounts + if self.dm._account_index_map is None: + self.dm._account_index_map = {} + for i, account in enumerate(self.dm.accounts): + self.dm._account_index_map[account.id] = i + + def fetch_accounts(self, **kwargs) -> None: + """Initialize the list of Account objects in the organization""" + self.__l_accounts(**kwargs) + + def __e_effective_policies_for_target( + self, target_id: str + ) -> list[EffectivePolicy]: + """Extract a list of effective policies for a target node""" + effective_policies = [] + for p_type in self.enabled_policy_types: + # SCPs aren't supported for effective policies + if p_type == "SERVICE_CONTROL_POLICY": + continue + data = self.api( + "describe_effective_policy", policy_type=p_type, target_id=target_id + ) + effective_policies.append(data) + return effective_policies + + def __e_effective_policies( + self, account_ids: list[str] = None + ) -> dict[int, list[dict[str, Any]]]: + """Extract the effective policies for accounts or a list of account IDs""" + ret = {} + if self.dm.accounts is None: + self.fetch_accounts() + if account_ids is None: + account_ids = [account.id for account in self.dm.accounts] + for account_id in account_ids: + ret[account_id] = self.__e_effective_policies_for_target(account_id) + return ret + + def __t_effective_policies(self, **kwargs) -> dict[int, list[EffectivePolicy]]: + """Transform effective policy data into a list of EffectivePolicy""" + return [EffectivePolicy.from_dict(d) for d in self.__e_effective_policies()] + + def __l_effective_policies(self, 
**kwargs) -> None:
+        """Load effective policy objects into the account tree"""
+        for acct_id, effective_policies in self.__e_effective_policies().items():
+            acct_index = self.__lookup_account_index(acct_id)
+            self.dm.accounts[acct_index].effective_policies = effective_policies
+
+    def fetch_effective_policies(self, **kwargs) -> None:
+        """Initialize effective policy data for accounts in the org"""
+        self.__l_effective_policies(**kwargs)
+
+    def __et_tags(self, resource_ids: list[str]) -> dict[str, dict[str, str]]:
+        """Extract and transform tags for a list of resource IDs"""
+        ret = {}
+        for resource_id in resource_ids:
+            ret[resource_id] = query_tags(self.client, resource_id)
+        return ret
+
+    def __l_account_tags(self, account_ids: list[str] = None, **kwargs) -> None:
+        """Load tags for accounts in the organization"""
+        if self.dm.accounts is None:
+            self.fetch_accounts()
+        if account_ids is None:
+            account_ids = [account.id for account in self.dm.accounts]
+        data = self.__et_tags(resource_ids=account_ids)
+        for acct_id, tags in data.items():
+            acct_index = self.__lookup_account_index(acct_id)
+            self.dm.accounts[acct_index].tags = tags
+
+    def fetch_account_tags(self, **kwargs) -> None:
+        """Initialize tags for accounts in the organization"""
+        self.__l_account_tags(**kwargs)
+
+    def __l_ou_tags(self, ou_ids: list[str] = None) -> None:
+        """Load tags for OUs in the organization"""
+        if self.dm.organizational_units is None:
+            self.fetch_ous()
+        if ou_ids is None:
+            ou_ids = [ou.id for ou in self.dm.organizational_units]
+        data = self.__et_tags(resource_ids=ou_ids)
+        for ou_id, tags in data.items():
+            ou_index = self.__lookup_ou_index(ou_id)
+            self.dm.organizational_units[ou_index].tags = tags
+
+    def fetch_ou_tags(self, **kwargs) -> None:
+        """Initialize tags for OUs in the organization"""
+        self.__l_ou_tags(**kwargs)
+
+    def __l_root_tags(self) -> None:
+        """Load tags for the organization root"""
+        data = self.__et_tags(resource_ids=[self.dm.root.id])
+        self.dm.root.tags = data[self.dm.root.id]
+
+    @property
+    def is_initialized(self) -> bool:
+        if self.dm is None:
+            return False
+        return True
+
+    def fetch_root_tags(self) -> None:
+        """Initialize tags for the organization root"""
+        self.__l_root_tags()
+
+    def __l_policy_tags(self, policy_ids: list[str] = None) -> None:
+        """Load tags for policies in the organization"""
+        if self.dm.policies is None:
+            self.fetch_policies()
+        if policy_ids is None:
+            policy_ids = [
+                policy.policy_summary.id
+                for policy in self.dm.policies
+                if not policy.policy_summary.aws_managed
+            ]
+        data = self.__et_tags(resource_ids=policy_ids)
+        for policy_id, tags in data.items():
+            policy_index = self.__lookup_policy_index(policy_id)
+            self.dm.policies[policy_index].tags = tags
+
+    def fetch_policy_tags(self, **kwargs) -> None:
+        """Initialize tags for policies in the organization"""
+        self.__l_policy_tags(**kwargs)
+
+    def fetch_all_tags(self) -> None:
+        """Initialize and populate tags for all taggable objects in the organization"""
+        self.fetch_root_tags()
+        self.fetch_policy_tags()
+        self.fetch_ou_tags()
+        self.fetch_account_tags()
+
+    def to_dict(self, **kwargs) -> dict[str, Any]:
+        """Return the data model for the organization as a dictionary"""
+        return self.dm.to_dict(**kwargs)
+
+    def to_dynamodb(self, **kwargs) -> dict[str, Any]:
+        """Return the data model for the organization as a DynamoDB Item"""
+        return self.dm.to_dynamodb(**kwargs)
+
+    def to_json(self, **kwargs) -> str:
+        """Return the data model for the organization as a JSON
string""" + return self.dm.to_json(**kwargs) + + def to_yaml(self, **kwargs) -> str: + """Return the data model for the organization as a YAML string""" + return self.dm.to_yaml(**kwargs) + + def fetch_all(self) -> None: + """Initialize all data for nodes and edges in the organization""" + self.Connect() + self.fetch_organization() + self.fetch_root_tags() + self.fetch_policies() + self.fetch_policy_tags() + self.fetch_ous() + self.fetch_ou_tags() + self.fetch_accounts() + self.fetch_account_tags() + self.fetch_policy_targets() + self.fetch_effective_policies() + + def __post_init__( + self, + init_all: bool, + init_connection: bool, + init_organization: bool, + init_policies: bool, + init_policy_tags: bool, + init_ous: bool, + init_ou_tags: bool, + init_accounts: bool, + init_account_tags: bool, + init_policy_targets: bool, + init_effective_policies: bool, + ) -> None: + """Initialize all or selected data for the organization""" + if init_all: + self.fetch_all() + return + if init_connection: + self.Connect() + if init_organization: + self.fetch_organization() + if init_policies: + self.fetch_policies() + if init_policy_tags: + self.fetch_policy_tags() + if init_ous: + self.fetch_ous() + if init_ou_tags: + self.fetch_ou_tags() + if init_accounts: + self.fetch_accounts() + if init_account_tags: + self.fetch_account_tags() + if init_policy_targets: + self.fetch_policy_targets() + if init_effective_policies: + self.fetch_effective_policies() diff --git a/aws_data_tools/models/sns.py b/aws_data_tools/models/sns.py new file mode 100644 index 0000000..ef40c53 --- /dev/null +++ b/aws_data_tools/models/sns.py @@ -0,0 +1,62 @@ +from dataclasses import dataclass +import logging +from typing import Optional + +from .base import ModelBase +from ..utils.validators import is_valid_json + +logging.getLogger(__name__).addHandler(logging.NullHandler()) + + +@dataclass +class SnsMessageData(ModelBase): + """Represents the data from an SNS message that is under the "Sns" field""" + + message: str + message_id: str + signature: str + signature_version: str + signing_cert_url: str + subject: str + timestamp: str + topic_arn: str + type: str + unsubscribe_url: str + + # TODO: Unsure if there are additional attributes if the message comes from an SNS + # FIFO topic + + +@dataclass +class SnsMessage(ModelBase): + """Schema for an SNS message""" + + message: str + message_id: str + subject: str + timestamp: str + topic_arn: str + type: str + unsubscribe_url: str + + signature: Optional[str] + signature_version: Optional[str] + signing_cert_url: Optional[str] + + # TODO: Unsure if there are additional attributes if the message comes from an SNS + # FIFO topic + + @property + def is_body_json(self) -> bool: + """Check if the message body is a JSON string""" + return is_valid_json(self.message) + + +@dataclass +class LambdaSnsMessage(ModelBase): + """Represents the SNS message format when using Lambda subscriptions""" + + event_source: str + event_version: str + event_subscription_arn: str + sns: SnsMessage diff --git a/aws_data_tools/models/sqs.py b/aws_data_tools/models/sqs.py new file mode 100644 index 0000000..75aa70e --- /dev/null +++ b/aws_data_tools/models/sqs.py @@ -0,0 +1,95 @@ +from dataclasses import dataclass, field +from hashlib import md5 +import logging +from typing import Optional + +from .base import ModelBase +from ..utils.validators import is_valid_json + +logging.getLogger(__name__).addHandler(logging.NullHandler()) + + +@dataclass +class SqsMessageAttributes(ModelBase): + """Attributes attached 
to an SQS message""" + + # TODO: Unsure if some of these string will need to be cast to int or other types + approximate_receive_count: str + approximate_first_receive_timestamp: str + sender_id: str + sent_timestamp: str + + # Optional attributes added when using FIFO queues + aws_trace_header: str = field(default=None) + message_deduplication_id: str = field(default=None) + message_group_id: str = field(default=None) + sequence_number: str = field(default=None) + + _FIFO_ATTRIBUTES = [ + "message_deduplication_id", + "message_group_id", + "sequence_number", + ] + + @property + def is_fifo(self) -> bool: + """Check if there are FIFO-specific attributes""" + for attr in self._FIFO_ATTRIBUTES: + if getattr(self, attr) is None: + return False + return True + + +@dataclass +class SqsCustomMessageAttributeDefinition(ModelBase): + """Type definition and value for a custom message attribute""" + + data_type: str + string_value: str + + +@dataclass +class SqsMessage(ModelBase): + """Schema for an SQS message""" + + body: str + md5_of_body: str + message_id: str + receipt_handle: str + + attributes: Optional[SqsMessageAttributes] + message_attributes: Optional[dict[str, SqsCustomMessageAttributeDefinition]] + event_source: Optional[str] + event_source_arn: Optional[str] + aws_region: Optional[str] + + # Optional + md5_of_message_attributes: Optional[str] = field(default=None) + + @property + def is_fifo(self) -> bool: + """Check if a message came from a FIFO queue""" + return self.attributes.is_fifo() + + @property + def calculated_md5_of_message_attributes(self) -> str: + """Calculate the MD5 checksum of the message attributes""" + # TODO: Figure out the algorithm to build a digest of the message attributes. + # View the following documentation for details: + # https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-message-metadata.html#sqs-attributes-md5-message-digest-calculation # noqa + raise NotImplementedError + + @property + def is_md5_of_message_attributes_valid(self) -> bool: + return ( + self.md5_of_message_attributes == self.calculated_md5_of_message_attributes + ) + + @property + def is_md5_of_body_valid(self) -> bool: + return md5(self.body.encode("utf-8")).hexdigest() == self.md5_of_body + + @property + def is_body_json(self) -> bool: + """Check if the message body is a JSON string""" + return is_valid_json(self.body) diff --git a/aws_data_tools/models/tests/fixtures/config/item-change-notification.json b/aws_data_tools/models/tests/fixtures/config/item-change-notification.json new file mode 100644 index 0000000..768c379 --- /dev/null +++ b/aws_data_tools/models/tests/fixtures/config/item-change-notification.json @@ -0,0 +1,12 @@ +{ + "invokingEvent": "{\"configurationItem\":{\"configurationItemCaptureTime\":\"2016-10-06T16:46:16.261Z\",\"awsAccountId\":\"123456789012\",\"configurationItemStatus\":\"OK\",\"resourceId\":\"i-00000000\",\"resourceName\":\"foo\",\"configurationStateMd5Hash\":\"8f1ee69b297895a0f8bc5753eca68e96\",\"resourceCreationTime\":\"2016-10-06T16:46:10.489Z\",\"configurationStateId\":0,\"configurationItemVersion\":\"1.2\",\"ARN\":\"arn:aws:ec2:us-east-1:123456789012:instance/i-00000000\",\"awsRegion\":\"us-east-1\",\"availabilityZone\":\"us-east-1\",\"resourceType\":\"AWS::EC2::Instance\",\"tags\":{\"\":\"\"},\"relationships\":[{\"resourceId\":\"eipalloc-00000000\",\"resourceType\":\"AWS::EC2::EIP\",\"name\":\"Is attached to ElasticIp\"}],\"configuration\":{\"\":\"\"}},\"messageType\":\"ConfigurationItemChangeNotification\"}", + 
"ruleParameters": "{\"\":\"\"}", + "resultToken": "myResultToken", + "eventLeftScope": false, + "executionRoleArn": "arn:aws:iam::123456789012:role/config-role", + "configRuleArn": "arn:aws:config:us-east-1:123456789012:config-rule/config-rule-0123456", + "configRuleName": "change-triggered-config-rule", + "configRuleId": "config-rule-0123456", + "accountId": "123456789012", + "version": "1.0" +} diff --git a/aws_data_tools/models/tests/fixtures/config/oversized-item-change-notification.json b/aws_data_tools/models/tests/fixtures/config/oversized-item-change-notification.json new file mode 100644 index 0000000..f71a38d --- /dev/null +++ b/aws_data_tools/models/tests/fixtures/config/oversized-item-change-notification.json @@ -0,0 +1,12 @@ +{ + "invokingEvent": "{\"configurationItemSummary\": {\"changeType\": \"UPDATE\",\"configurationItemVersion\": \"1.2\",\"configurationItemCaptureTime\":\"2016-10-06T16:46:16.261Z\",\"configurationStateId\": 0,\"awsAccountId\":\"123456789012\",\"configurationItemStatus\": \"OK\",\"resourceType\": \"AWS::EC2::Instance\",\"resourceId\":\"i-00000000\",\"resourceName\":null,\"ARN\":\"arn:aws:ec2:us-east-1:123456789012:instance/i-00000000\",\"awsRegion\": \"us-east-1\",\"availabilityZone\":\"us-east-1\",\"configurationStateMd5Hash\":\"8f1ee69b287895a0f8bc5753eca68e96\",\"resourceCreationTime\":\"2016-10-06T16:46:10.489Z\"},\"messageType\":\"OversizedConfigurationItemChangeNotification\"}", + "ruleParameters": "{\"\":\"\"}", + "resultToken": "myResultToken", + "eventLeftScope": false, + "executionRoleArn": "arn:aws:iam::123456789012:role/config-role", + "configRuleArn": "arn:aws:config:us-east-1:123456789012:config-rule/config-rule-0123456", + "configRuleName": "change-triggered-config-rule", + "configRuleId": "config-rule-0123456", + "accountId": "123456789012", + "version": "1.0" +} diff --git a/aws_data_tools/models/tests/fixtures/config/periodic-rule.json b/aws_data_tools/models/tests/fixtures/config/periodic-rule.json new file mode 100644 index 0000000..d1856ed --- /dev/null +++ b/aws_data_tools/models/tests/fixtures/config/periodic-rule.json @@ -0,0 +1,12 @@ +{ + "invokingEvent": "{\"awsAccountId\":\"123456789012\",\"notificationCreationTime\":\"1970-01-01T00:00:00.0Z\",\"messageType\":\"ScheduledNotification\",\"recordVersion\":\"1.0\"}", + "ruleParameters": "{\"myParameterKey\":\"myParameterValue\"}", + "resultToken": "myResultToken", + "eventLeftScope": false, + "executionRoleArn": "arn:aws:iam::123456789012:role/config-role", + "configRuleArn": "arn:aws:config:us-east-1:123456789012:config-rule/config-rule-0123456", + "configRuleName": "periodic-config-rule", + "configRuleId": "config-rule-0123456", + "accountId": "123456789012", + "version": "1.0" +} diff --git a/aws_data_tools/models/tests/fixtures/sns/notification.json b/aws_data_tools/models/tests/fixtures/sns/notification.json new file mode 100644 index 0000000..bb6cc64 --- /dev/null +++ b/aws_data_tools/models/tests/fixtures/sns/notification.json @@ -0,0 +1,31 @@ +{ + "Records": [ + { + "EventSource": "aws:sns", + "EventVersion": "1.0", + "EventSubscriptionArn": "arn:aws:sns:us-east-1::ExampleTopic", + "Sns": { + "Type": "Notification", + "MessageId": "95df01b4-ee98-5cb9-9903-4c221d41eb5e", + "TopicArn": "arn:aws:sns:us-east-1:123456789012:ExampleTopic", + "Subject": "example subject", + "Message": "example message", + "Timestamp": "1970-01-01T00:00:00.000Z", + "SignatureVersion": "1", + "Signature": "EXAMPLE", + "SigningCertUrl": "EXAMPLE", + "UnsubscribeUrl": "EXAMPLE", + "MessageAttributes": { 
+ "Test": { + "Type": "String", + "Value": "TestString" + }, + "TestBinary": { + "Type": "Binary", + "Value": "TestBinary" + } + } + } + } + ] +} diff --git a/aws_data_tools/models/tests/fixtures/sqs/receive-message.json b/aws_data_tools/models/tests/fixtures/sqs/receive-message.json new file mode 100644 index 0000000..f697abd --- /dev/null +++ b/aws_data_tools/models/tests/fixtures/sqs/receive-message.json @@ -0,0 +1,20 @@ +{ + "Records": [ + { + "messageId": "19dd0b57-b21e-4ac1-bd88-01bbb068cb78", + "receiptHandle": "MessageReceiptHandle", + "body": "Hello from SQS!", + "attributes": { + "ApproximateReceiveCount": "1", + "SentTimestamp": "1523232000000", + "SenderId": "123456789012", + "ApproximateFirstReceiveTimestamp": "1523232000001" + }, + "messageAttributes": {}, + "md5OfBody": "7b270e59b47ff90a553787216d55d91d", + "eventSource": "aws:sqs", + "eventSourceARN": "arn:aws:sqs:us-east-1:123456789012:MyQueue", + "awsRegion": "us-east-1" + } + ] +} diff --git a/aws_data_tools/models/tests/test_base.py b/aws_data_tools/models/tests/test_base.py new file mode 100644 index 0000000..9099175 --- /dev/null +++ b/aws_data_tools/models/tests/test_base.py @@ -0,0 +1,86 @@ +# TODO: Implement these tests using a model that inherits from ModelBase. As-is the +# coverage report doesn't think any of the ModelBase code is executed. +# +# from dataclasses import dataclass, field, InitVar +# import json +# from typing import List +# +# import pytest +# import yaml +# +# from aws_data_tools.models.base import ModelBase +# +# +# @dataclass +# class Model(ModelBase): +# """A dataclass model for testing ModelBase""" +# is_test: bool = field(default=True) +# test_string: str = field(default="This is a test") +# test_listfield: List[str] = field(default_factory=list) +# test_initvar: InitVar[str] = field(default="hello") +# +# @property +# def expected_dict(self): +# return { +# "is_test": True, +# "test_string": "This is a test", +# "test_listfield": ["foo", "bar", "baz"], +# } +# +# @property +# def expected_listfield(self): +# return self.expected_dict["test_listfield"] +# +# @property +# def expected_dynamodb(self): +# return { +# "is_test": {"BOOL": True}, +# "test_string": {"S": "This is a test"}, +# "test_listfield": {"L": [{"S": "foo"}, {"S": "bar"}, {"S": "baz"}]}, +# } +# +# @property +# def expected_json(self): +# return json.dumps(self.expected_dict) +# +# @property +# def expected_yaml(self): +# return yaml.dump(self.expected_dict) +# +# def __post_init__(self, test_listfield): +# self.test_listfield = ["foo", "bar", "baz"] +# +# +# class TestModelBase: +# """Test the ModelBase class""" +# +# @pytest.fixture() +# def model(self): +# return Model() +# +# @pytest.mark.parametrize("field_name", [None, "test_listfield", "badfield"]) +# def test_to_dict(self, model, field_name): +# """Test serializing the model to a dict""" +# if field_name is not None: +# if field_name == "test_listfield": +# assert model.to_dict( +# field_name=field_name +# ) == model.expected_listfield +# elif field_name == "badfield": +# with pytest.raises(Exception): +# model.to_dict(field_name=field_name) +# else: +# assert model.to_dict() == model.expected_dict +# +# def test_to_dynamodb(self, model): +# """Test serializing the model to a DynamoDB Item dict""" +# assert True is True +# assert model.to_dynamodb() == model.expected_dynamodb +# +# def test_to_json(self, model): +# """Test serializing the model to JSON""" +# assert model.to_json() == model.expected_json +# +# def test_to_yaml(self, model): +# """Test serializing the 
model to YAML""" +# assert model.to_yaml() == model.expected_yaml diff --git a/aws_data_tools/models/tests/test_organizations.py b/aws_data_tools/models/tests/test_organizations.py new file mode 100644 index 0000000..c8ef936 --- /dev/null +++ b/aws_data_tools/models/tests/test_organizations.py @@ -0,0 +1,539 @@ +# flake8: noqa: F401 +from typing import Union +from unittest import mock + +import graphviz +from humps import depascalize +from moto import mock_organizations +import pytest + +from aws_data_tools.conftest import FIXTURES_PATH +from aws_data_tools.client import APIClient +from aws_data_tools.models.organizations import ( + Account, + EffectivePolicy, + Organization, + OrganizationDataBuilder, + OrganizationalUnit, + ParChild, + Policy, + PolicySummary, + PolicySummaryForTarget, + PolicyTargetSummary, + PolicyTypeSummary, + Root, +) + + +class TestParChild: + """Test the ParChild model""" + + @pytest.mark.parametrize("type_", ["ACCOUNT", "INVALID_TYPE"]) + def test_init(self, type_): + if type_ == "ACCOUNT": + parchild = ParChild(id="123456", type=type_) + assert isinstance(parchild, ParChild) + assert parchild.to_dict() == {"id": "123456", "type": type_} + elif type_ == "INVALID_TYPE": + with pytest.raises(Exception): + parchild = ParChild(id="123456", type=type_) + + +class TestPolicy: + """Test the Policy model""" + + @pytest.fixture + def policy(self): + policy_summary = { + "arn": "arn:aws:us-east-1:organizations:policy/p-asdfjkl", + "aws_managed": False, + "description": "Just a test policy summary", + "id": "p-asdfjkl", + "name": "TestPolicy", + "type": "SERVICE_CONTROL_POLICY", + } + return Policy(policy_summary=PolicySummary(**policy_summary)) + + @property + def expected_to_target(self, policy): + return PolicySummaryForTarget(id=policy["id"], type=policy["type"]) + + def test_to_target(self, policy): + assert isinstance(policy, Policy) + assert isinstance(policy["policy_summary"], PolicySummary) + pol_to_target = policy.to_target() + assert isinstance(pol_to_target, PolicySummaryForTarget) + assert pol_to_target == self.expected_to_target + + +class TestEffectivePolicy: + """Test the EffectivePolicy model""" + + @pytest.mark.parametrize("type_", ["SERVICE_CONTROL_POLICY", "TAG_POLICY"]) + def test_init(self, type_): + p_data = { + "last_updated_timestamp": "2018-05-14 22:17:25.989000-05:00", + "policy_content": '{"Statement":[{"Effect":"Allow","Action":["*"],"Resource":["*"]}]}', + "policy_type": type_, + "target_id": "123456789012", + } + if type_ == "TAG_POLICY": + effective_policy = EffectivePolicy(**p_data) + assert isinstance(effective_policy, EffectivePolicy) + elif type_ == "SERVICE_CONTROL_POLICY": + with pytest.raises(Exception): + effective_policy = EffectivePolicy(**p_data) + + +class TestPolicySummary: + """Test the PolicySummary model""" + + @pytest.fixture + def policy_summary_map(self): + data = { + "arn": "arn:aws:us-east-1:organizations:policy/p-asdfjkl", + "aws_managed": False, + "description": "Just a test policy summary", + "id": "p-asdfjkl", + "name": "TestPolicy", + "type": "SERVICE_CONTROL_POLICY", + } + invalid_data = data.copy() + invalid_data["type"] = "INVALID_TYPE" + return { + "valid": data, + "invalid": invalid_data, + } + + @pytest.mark.parametrize("type_", ["valid", "invalid"]) + def test_init(self, policy_summary_map, type_): + if type_ == "valid": + policy_summary = PolicySummary(**policy_summary_map["valid"]) + assert isinstance(policy_summary, PolicySummary) + elif type_ == "invalid": + with pytest.raises(Exception): + policy_summary 
= PolicySummary(**policy_summary_map["invalid"]) + + +class TestPolicySummaryForTarget: + """Test the PolicySummaryForTarget model""" + + def test_init(self): + assert True is True + + +class TestPolicyTargetSummary: + """Test the PolicyTargetSummary model""" + + def test_init(self): + assert True is True + + +class TestPolicyTypeSummary: + """Test the PolicyTypeSummary model""" + + def test_init(self): + assert True is True + + +class TestRoot: + """Test the Root model""" + + def test_to_parchild_dict(self): + assert True is True + + def test_to_parchild(self): + assert True is True + + +class TestOrganizationalUnit: + """Test the OrganizationalUnit model""" + + def test_to_parchild_dict(self): + assert True is True + + def test_to_parchild(self): + assert True is True + + +class TestAccount: + """Test the Account model""" + + def test_to_parchild_dict(self): + assert True is True + + def test_to_parchild(self): + assert True is True + + +class TestOrganization: + """Test the Organization model""" + + def test_init(self): + assert True is True + + +class TestOrganizationDataBuilder: + """Test the OrganizationDataBuilder class""" + + @pytest.fixture(scope="class") + def client( + self, + apiclient_session_kwargs, + aws_credentials, + ) -> APIClient: + """An APIClient instance with a mocked Organizations client""" + return APIClient( + "organizations", + client_kwargs=apiclient_session_kwargs, + session_kwargs=apiclient_session_kwargs, + ) + + @pytest.fixture(scope="function") + def builder(self, client): + return OrganizationDataBuilder(client=client) + + @staticmethod + def process_ou_paths(paths: list[str]) -> dict[str, dict[str, str]]: + """Convert a list of OU paths into a tree""" + if paths is None: + paths = [] + path_tree = {} + # { + # "/GrumpySysadmins": { + # "name": "GrumpySysadmins", + # "parent_path": "/" + # } + # "/GrumpySysadmins/Services": { + # "name": "Services", + # "parent_path": "/GrumpySysadmins" + # } + # } + for path in paths: + elements = path.split("/") + path_name = elements[-1] + parent_path = f"{str.join('/', elements[:-1])}" + if parent_path == "": + parent_path = "/" + if path_tree.get(parent_path) is None: + if parent_path == "/": + path_tree[parent_path] = {"depth": 0} + else: + path_tree[parent_path] = {} + if path_tree[parent_path].get("children") is None: + path_tree[parent_path]["children"] = [] + path_tree[parent_path]["children"].append(path_name) + path_tree[path] = { + "depth": len(elements) - 1, + "name": path_name, + "parent_path": parent_path, + } + return path_tree + + @staticmethod + def process_account_paths(paths: list[str]) -> list[dict[str, str]]: + """Process a list of account paths into a list of account dicts""" + if paths is None: + paths = [] + processed_paths = [] + # [ + # { + # "name": "acct-1", + # "path": "/GrumpySysadmins/acct-1", + # "parent_path": "/GrumpySysadmins" + # }, + # { + # "name": "acct-2", + # "path": "/GrumpySysadmins/Services/acct-2", + # "parent_path": "/GrumpySysadmins/Services" + # } + # ] + for path in paths: + elements = path.split("/") + parent_path = f"{str.join('/', elements[:-1])}" + if parent_path == "": + parent_path = "/" + processed_paths.append( + {"name": elements[-1], "path": path, "parent_path": parent_path} + ) + return processed_paths + + @staticmethod + def process_pathfile( + filepath: str, path_type: str + ) -> Union[dict[str, dict[str, str]], list[dict[str, str]]]: + """Read a file
that's a list of paths and generate a map or list of maps""" + paths = None + with open(filepath, "r") as f: + paths = [line.rstrip("\n") for line in f.readlines()] + if path_type == "ou": + return TestOrganizationDataBuilder.process_ou_paths(paths) + elif path_type == "account": + return TestOrganizationDataBuilder.process_account_paths(paths) + else: + raise Exception(f"Invalid path type {path_type}") + + @pytest.fixture(scope="class") + def ou_paths(self): + """A tree of OUs to create""" + path = FIXTURES_PATH / "ou_paths.txt" + return self.process_pathfile(path, path_type="ou") + + @pytest.fixture(scope="class") + def account_paths(self): + """A list of accounts to create with populated parent path data""" + path = FIXTURES_PATH / "account_paths.txt" + return self.process_pathfile(path, path_type="account") + + @staticmethod + def create_test_organization(aws_credentials, client) -> Organization: + create_org = client.api("create_organization", feature_set="ALL").get( + "organization" + ) + org = Organization.from_dict(create_org) + list_roots = client.api("list_roots")[0] + org.root = Root.from_dict(list_roots) + return org + + @pytest.fixture(scope="class") + def root(self, organization) -> Root: + return organization.root + + @pytest.fixture(scope="class") + @mock_organizations + def organizational_units( + self, client, root, ou_paths + ) -> dict[str, OrganizationalUnit]: + """Create the test OUs""" + created_ous = {} + maxdepth = 5 + for i in range(1, maxdepth): + tree = {k: v for k, v in ou_paths.items() if v["depth"] == i} + for k, v in tree.items(): + ou_name = v["name"] + parent_id = None + parent_path = v["parent_path"] + if parent_path == "/": + parent_id = root.id + else: + parent_id = created_ous[parent_path].id + data = client.api( + "create_organizational_unit", name=ou_name, parent_id=parent_id + ).get("organizational_unit") + created_ous[k] = OrganizationalUnit.from_dict(data) + return created_ous + + @pytest.fixture(scope="class") + def account_parent_map(self, organization, organizational_units): + """Creates a map of parent path to parent id to be used in account creation""" + data = {"/": organization.root.id} + for ou_path, ou in organizational_units.items(): + data[ou_path] = ou.id + return data + + @pytest.fixture(scope="class") + @mock_organizations + def accounts( + self, + client, + organization, + organizational_units, + account_parent_map, + account_paths, + ) -> dict[str, Account]: + """Create the test accounts""" + created_accounts = {} + for account in account_paths: + account_name = account["name"] + email = account["name"] + "@example.com" + parent_id = account_parent_map[account["parent_path"]] + create_account_status = client.api( + "create_account", + account_name=account_name, + email=email, + ).get("create_account_status") + account_id = create_account_status["account_id"] + move_account = client.api( + "move_account", + account_id=account_id, + destination_parent_id=parent_id, + source_parent_id=organization.root.id, + ) + if move_account["response_metadata"]["http_status_code"] != 200: + raise Exception(f"Error moving account {account_name}") + data = client.api("describe_account", account_id=account_id).get("account") + created_accounts[account["path"]] = Account.from_dict(data) + return created_accounts + + @pytest.fixture(scope="class") + @mock_organizations + def policies(self, client, organization): + """Return policies in the test organization""" + policies = [] + for p_type in organization.root.policy_types: + p_type_policies = [] + data = client.api("list_policies", filter=p_type.type)
+ for p_summary in data: + p_desc = client.api("describe_policy", policy_id=p_summary["id"]).get( + "policy" + ) + p_type_policies.append(Policy.from_dict(p_desc)) + policies.extend(p_type_policies) + return policies + + @mock_organizations + def test_fetch_organization(self, builder, organization): + builder.fetch_organization() + organization.root = None + assert isinstance(builder.dm, Organization) + assert builder.dm == organization + + @mock_organizations + def test_fetch_root(self, builder, root): + builder.fetch_organization() + builder.fetch_root() + assert isinstance(builder.dm.root, Root) + assert builder.dm.root == root + + @mock_organizations + def test_fetch_policies(self, builder, policies): + builder.fetch_organization() + builder.fetch_root() + builder.fetch_policies() + for policy in builder.dm.policies: + assert isinstance(policy, Policy) + assert builder.dm.policies == policies + + @mock_organizations + def test_fetch_policy_targets( + self, + builder, + root, + organizational_units, + accounts, + policies, + ): + expected_targets = [root] + expected_targets.extend(organizational_units.values()) + expected_targets.extend(accounts.values()) + expected = {} + for target in expected_targets: + target_type = depascalize(type(target).__name__) + expected[target.id] = target_type + builder.fetch_organization() + builder.fetch_root() + builder.fetch_policies() + # TODO: This test isn't fleshed out. We're not yet creating any policies of our + # own when seeding the test organization, so the only policy that will exist is + # the default p-FullAWSAccess policy. We're only grabbing the first element. + assert builder.dm.policies[0] == policies[0] + fetched = {} + for target in builder.dm.policies[0].targets: + fetched[target.id] = target.type + assert isinstance(target, PolicyTargetSummary) + assert fetched == expected + + @mock_organizations + def test_fetch_ous(self, builder, organizational_units): + expected = {ou.id: ou for ou in organizational_units.values()} + builder.fetch_organization() + builder.fetch_root() + builder.fetch_ous() + fetched = {} + for ou in builder.dm.organizational_units: + fetched[ou.id] = ou + assert isinstance(ou, OrganizationalUnit) + assert fetched == expected + + @mock_organizations + def test_fetch_ou_tags(self, builder, organizational_units): + builder.fetch_ous() + builder.fetch_ou_tags() + expected = {ou.id: ou.tags for ou in organizational_units.values()} + fetched = {ou.id: ou.tags for ou in builder.dm.organizational_units} + assert fetched == expected + + @pytest.mark.parametrize("include_parents", [True, False]) + @mock_organizations + def test_fetch_accounts(self, builder, accounts, include_parents): + expected = {} + if include_parents: + expected = {account.id: account for account in accounts.values()} + else: + for account in accounts.values(): + account_no_parent = account + account_no_parent.parent = None + expected[account_no_parent.id] = account_no_parent + builder.fetch_organization() + builder.fetch_root() + builder.fetch_accounts(include_parents=include_parents) + fetched = {} + for account in builder.dm.accounts: + fetched[account.id] = account + assert isinstance(account, Account) + assert fetched == expected + + @mock_organizations + def test_fetch_account_tags(self, builder, accounts): + builder.fetch_accounts() + builder.fetch_account_tags() + expected = {account.id: account.tags for account in accounts.values()} + fetched = {account.id: account.tags for account in builder.dm.accounts} + assert fetched == expected + + @mock_organizations + def
test_fetch_effective_policies(self, builder, accounts): + builder.fetch_accounts(include_parents=False) + builder.fetch_effective_policies() + # TODO: Again, since we're not actually creating any policies when seeding the + # test organization, there shouldn't actually be any effective policies. This + # test is naive. + expected = { + account.id: account.effective_policies for account in accounts.values() + } + fetched = { + account.id: account.effective_policies for account in builder.dm.accounts + } + assert fetched == expected + + def test_fetch_root_tags(self, builder): + assert True is True + + def test_fetch_policy_tags(self, builder): + assert True is True + + def test_fetch_all_tags(self, builder): + assert True is True + + @mock_organizations + def test_fetch_all( + self, + builder, + organization, + root, + organizational_units, + policies, + accounts, + ): + builder.fetch_all() + org = organization + org.root = root + org.organizational_units = list(organizational_units.values()) + org.policies = policies + org.accounts = list(accounts.values()) + assert builder.dm == org + + @mock.patch("builtins.open", create=True) + @mock_organizations + def test_to_dot(self, builder, organization): + builder.fetch_all() + source_str = builder.to_dot() + source = graphviz.Source(source_str, filename="test.png", format="png") + output = source.render() diff --git a/aws_data_tools/utils.py b/aws_data_tools/utils.py deleted file mode 100644 index a7a3772..0000000 --- a/aws_data_tools/utils.py +++ /dev/null @@ -1,53 +0,0 @@ -""" -Utilities for common operations that happen across different services -""" -from typing import Any, Dict, List - -from boto3.dynamodb.types import TypeDeserializer, TypeSerializer - -from .client import APIClient - - -def tag_list_to_dict(tags: List[Dict[str, str]]) -> Dict[str, str]: - """Convert a list of tag objects to a dict""" - return {tag["key"]: tag["value"] for tag in tags} - - -def query_tags(client: APIClient, resource_id: str) -> Dict[str, str]: - """Get a dict of tags for a resource""" - tags = client.api("list_tags_for_resource", resource_id=resource_id) - if len(tags) == 0: - return {} - return tag_list_to_dict(tags) - - -def serialize_dynamodb_item(item: Dict[str, Any]) -> Dict[str, Any]: - """Convert a dict to a DynamoDB Item""" - serializer = TypeSerializer() - return {key: serializer.serialize(value) for key, value in item.items()} - - -def serialize_dynamodb_items(items: List[Dict[str, Any]]) -> List[Dict[str, Any]]: - """Convert a list of dicts to a list of DynamoDB Items""" - return [serialize_dynamodb_item(item) for item in items] - - -def deserialize_dynamodb_item(item: Dict[str, Any]) -> Dict[str, Any]: - """Convert a DynamoDB Item to a dict""" - deserializer = TypeDeserializer() - return {key: deserializer.deserialize(value) for key, value in item.items()} - - -def deserialize_dynamodb_items(items: List[Dict[str, Any]]) -> List[Dict[str, Any]]: - """Convert a list of DynamoDB Items to a list of dicts""" - return [deserialize_dynamodb_item(item) for item in items] - - -def prepare_dynamodb_batch_put_request( - table: str, - items: List[Dict[str, Any]], -) -> Dict[str, List[Dict[str, Any]]]: - """Prepare PutRequest input for a DynamoDB BatchWriteItem request""" - return { - table: [{"PutRequest": {"Item": item}} for item in items if item is not None] - } diff --git a/aws_data_tools/utils/__init__.py b/aws_data_tools/utils/__init__.py new file mode 100644 index 0000000..9225ccc --- /dev/null +++ b/aws_data_tools/utils/__init__.py @@ -0,0 +1,6 @@ +""" +Utilities for
common operations that happen across different services +""" +# flake8: noqa: F401 + +from . import dynamodb, tags, validators diff --git a/aws_data_tools/utils/dynamodb.py b/aws_data_tools/utils/dynamodb.py new file mode 100644 index 0000000..2769382 --- /dev/null +++ b/aws_data_tools/utils/dynamodb.py @@ -0,0 +1,50 @@ +"""Utilities for working with DynamoDB""" + +import json +import logging +from typing import Any + +from boto3.dynamodb.types import TypeDeserializer, TypeSerializer + +logging.getLogger(__name__).addHandler(logging.NullHandler()) + + +def deserialize_dynamodb_item(item: dict[str, Any]) -> dict[str, Any]: + """Convert a DynamoDB Item to a dict""" + deserializer = TypeDeserializer() + return {key: deserializer.deserialize(value) for key, value in item.items()} + + +def deserialize_dynamodb_items(items: list[dict[str, Any]]) -> list[dict[str, Any]]: + """Convert a list of DynamoDB Items to a list of dicts""" + return [deserialize_dynamodb_item(item) for item in items] + + +def serialize_dynamodb_item(item: dict[str, Any]) -> dict[str, Any]: + """Convert a dict to a DynamoDB Item""" + serializer = TypeSerializer() + return {key: serializer.serialize(value) for key, value in item.items()} + + +def serialize_dynamodb_items(items: list[dict[str, Any]]) -> list[dict[str, Any]]: + """Convert a list of dicts to a list of DynamoDB Items""" + return [serialize_dynamodb_item(item) for item in items] + + +def prepare_dynamodb_batch_put_request( + table: str, + items: list[dict[str, Any]], +) -> dict[str, list[dict[str, Any]]]: + """Prepare PutRequest input for a DynamoDB BatchWriteItem request""" + return { + table: [{"PutRequest": {"Item": item}} for item in items if item is not None] + } + + +def is_valid_json(s: str) -> bool: + """Check if a string is valid JSON""" + try: + json.loads(s) + except ValueError: + return False + return True diff --git a/aws_data_tools/utils/tags.py b/aws_data_tools/utils/tags.py new file mode 100644 index 0000000..7ac6a87 --- /dev/null +++ b/aws_data_tools/utils/tags.py @@ -0,0 +1,20 @@ +import logging + +from humps import depascalize + +from ..client import APIClient + +logging.getLogger(__name__).addHandler(logging.NullHandler()) + + +def tag_list_to_dict(tags: list[dict[str, str]]) -> dict[str, str]: + """Convert a list of tag objects to a dict""" + return {tag["key"]: tag["value"] for tag in depascalize(tags)} + + +def query_tags(client: APIClient, resource_id: str) -> dict[str, str]: + """Get a dict of tags for a resource""" + tags = client.api("list_tags_for_resource", resource_id=resource_id) + if len(tags) == 0: + return {} + return tag_list_to_dict(tags) diff --git a/aws_data_tools/utils/tests/test_dynamodb.py b/aws_data_tools/utils/tests/test_dynamodb.py new file mode 100644 index 0000000..268795d --- /dev/null +++ b/aws_data_tools/utils/tests/test_dynamodb.py @@ -0,0 +1,68 @@ +import json +from pathlib import Path + +import pytest + +from aws_data_tools.utils.dynamodb import ( + deserialize_dynamodb_item, + deserialize_dynamodb_items, + prepare_dynamodb_batch_put_request, + serialize_dynamodb_item, + serialize_dynamodb_items, +) + + +FIXTURES_PATH = Path(__file__).parent.parent.parent.absolute() / "fixtures" + + +@pytest.fixture +def dynamodb_item(): + data = {} + with open(FIXTURES_PATH / "dynamodb_item.json", "rb") as f: + data = json.load(f) + return data + + +@pytest.fixture +def dynamodb_item_serialized(): + data = {} + with open(FIXTURES_PATH / "dynamodb_item_serialized.json", "rb") as f: + data = json.load(f) + return data + + +def 
test_deserialize_dynamodb_item(dynamodb_item, dynamodb_item_serialized): + data = deserialize_dynamodb_item(dynamodb_item_serialized) + assert data == dynamodb_item + + +def test_deserialize_dynamodb_items(dynamodb_item, dynamodb_item_serialized): + serialized_items = [dynamodb_item_serialized, dynamodb_item_serialized] + deserialized_items = [dynamodb_item, dynamodb_item] + data = deserialize_dynamodb_items(serialized_items) + assert data == deserialized_items + + +def test_serialize_dynamodb_item(dynamodb_item, dynamodb_item_serialized): + data = serialize_dynamodb_item(dynamodb_item) + assert data == dynamodb_item_serialized + + +def test_serialize_dynamodb_items(dynamodb_item, dynamodb_item_serialized): + deserialized_items = [dynamodb_item, dynamodb_item] + serialized_items = [dynamodb_item_serialized, dynamodb_item_serialized] + data = serialize_dynamodb_items(deserialized_items) + assert data == serialized_items + + +def test_prepare_dynamodb_batch_put_request(dynamodb_item_serialized): + table = "TestTable" + items = [dynamodb_item_serialized, dynamodb_item_serialized] + expected = { + table: [ + {"PutRequest": {"Item": dynamodb_item_serialized}}, + {"PutRequest": {"Item": dynamodb_item_serialized}}, + ] + } + data = prepare_dynamodb_batch_put_request(table, items) + assert data == expected diff --git a/aws_data_tools/utils/tests/test_tags.py b/aws_data_tools/utils/tests/test_tags.py new file mode 100644 index 0000000..ed44a2c --- /dev/null +++ b/aws_data_tools/utils/tests/test_tags.py @@ -0,0 +1,56 @@ +from moto import mock_organizations +import pytest + + +from aws_data_tools.client import APIClient +from aws_data_tools.utils.tags import query_tags, tag_list_to_dict + + +@pytest.fixture() +def tag_list_map(): + return { + "empty": [], + "not_empty": [ + {"Key": "Test", "Value": "True"}, + {"Key": "TestTwo", "Value": "yes"}, + {"Key": "TestAgain", "Value": "Here we go again"}, + ], + } + + +@pytest.fixture() +def expected_tags_map(request): + return { + "empty": {}, + "not_empty": { + "Test": "True", + "TestTwo": "yes", + "TestAgain": "Here we go again", + }, + } + + +@pytest.mark.parametrize("tags_type", ["empty", "not_empty"]) +@mock_organizations +def test_query_tags(aws_credentials, tag_list_map, expected_tags_map, tags_type): + client = APIClient("organizations") + tag_list = tag_list_map[tags_type] + expected_tags = expected_tags_map[tags_type] + _ = client.api("create_organization", feature_set="ALL") + account = client.api( + "create_account", + account_name="TestAccount", + email="example@example.com", + iam_user_access_to_billing="ALLOW", + tags=tag_list, + ).get("create_account_status") + tags = query_tags(client=client, resource_id=account["account_id"]) + assert tags == expected_tags + + +@pytest.mark.parametrize("tags_type", ["empty", "not_empty"]) +def test_tag_list_to_dict(tag_list_map, expected_tags_map, tags_type): + tag_list = tag_list_map[tags_type] + expected_tags = expected_tags_map[tags_type] + tags = tag_list_to_dict(tag_list) + assert tags == expected_tags diff --git a/aws_data_tools/utils/validators.py b/aws_data_tools/utils/validators.py new file mode 100644 index 0000000..64a0b02 --- /dev/null +++ b/aws_data_tools/utils/validators.py @@ -0,0 +1,13 @@ +import json +import logging + +logging.getLogger(__name__).addHandler(logging.NullHandler()) + + +def is_valid_json(s: str) -> bool: + """Check if a string is valid JSON""" + try: + json.loads(s) + except ValueError: + return False + return True diff --git a/docker-compose.yaml b/docker-compose.yaml new 
file mode 100644 index 0000000..2b8117b --- /dev/null +++ b/docker-compose.yaml @@ -0,0 +1,6 @@ +version: "3.9" + +services: + motoserver: + image: motoserver/moto:2.0.5 + ports: ["5000:5000"] diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index 703417e..0000000 --- a/docker-compose.yml +++ /dev/null @@ -1,7 +0,0 @@ -version: "3.9" - -services: - motoserver: - container_name: adt_motoserver - image: ${IMAGE_NAME-motoserver/moto}:${IMAGE_TAG-2.0.5} - ports: ["5000:5000"] diff --git a/poetry.lock b/poetry.lock index 5d6b747..45d291b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,11 +1,3 @@ -[[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "main" -optional = false -python-versions = "*" - [[package]] name = "appnope" version = "0.1.2" @@ -16,24 +8,23 @@ python-versions = "*" [[package]] name = "argon2-cffi" -version = "20.1.0" +version = "21.1.0" description = "The secure Argon2 password hashing algorithm." category = "main" optional = true -python-versions = "*" +python-versions = ">=3.5" [package.dependencies] cffi = ">=1.0.0" -six = "*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest", "sphinx", "wheel", "pre-commit"] -docs = ["sphinx"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest", "sphinx", "furo", "wheel", "pre-commit"] +docs = ["sphinx", "furo"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] [[package]] name = "astroid" -version = "2.5.6" +version = "2.8.6" description = "An abstract syntax tree for Python with inference support." category = "dev" optional = false @@ -41,15 +32,8 @@ python-versions = "~=3.6" [package.dependencies] lazy-object-proxy = ">=1.4.0" -wrapt = ">=1.11,<1.13" - -[[package]] -name = "async-generator" -version = "1.10" -description = "Async generators and context managers for Python 3.5+" -category = "main" -optional = true -python-versions = ">=3.5" +typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} +wrapt = ">=1.11,<1.14" [[package]] name = "atomicwrites" @@ -92,31 +76,48 @@ category = "main" optional = true python-versions = "*" +[[package]] +name = "backports.entry-points-selectable" +version = "1.1.1" +description = "Compatibility shim providing selectable entry points for older implementations" +category = "main" +optional = false +python-versions = ">=2.7" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest", "pytest-flake8", "pytest-cov", "pytest-black (>=0.3.7)", "pytest-mypy", "pytest-checkdocs (>=2.4)", "pytest-enabler (>=1.0.1)"] + [[package]] name = "black" -version = "21.6b0" +version = "21.11b1" description = "The uncompromising code formatter." 
category = "main" optional = false python-versions = ">=3.6.2" [package.dependencies] -appdirs = "*" click = ">=7.1.2" mypy-extensions = ">=0.4.3" -pathspec = ">=0.8.1,<1" -regex = ">=2020.1.8" -toml = ">=0.10.1" +pathspec = ">=0.9.0,<1" +platformdirs = ">=2" +regex = ">=2021.4.4" +tomli = ">=0.2.6,<2.0.0" +typing-extensions = [ + {version = ">=3.10.0.0", markers = "python_version < \"3.10\""}, + {version = "!=3.10.0.1", markers = "python_version >= \"3.10\""}, +] [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"] -python2 = ["typed-ast (>=1.4.2)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +python2 = ["typed-ast (>=1.4.3)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "blacken-docs" -version = "1.10.0" +version = "1.12.0" description = "Run `black` on python code blocks in documentation files" category = "main" optional = true @@ -127,11 +128,11 @@ black = ">=19.3b0" [[package]] name = "bleach" -version = "3.3.0" +version = "4.1.0" description = "An easy safelist-based HTML-sanitizing tool." category = "main" optional = true -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" [package.dependencies] packaging = "*" @@ -140,24 +141,27 @@ webencodings = "*" [[package]] name = "boto3" -version = "1.17.94" +version = "1.20.11" description = "The AWS SDK for Python" category = "main" optional = false -python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">= 3.6" [package.dependencies] -botocore = ">=1.20.94,<1.21.0" +botocore = ">=1.23.11,<1.24.0" jmespath = ">=0.7.1,<1.0.0" -s3transfer = ">=0.4.0,<0.5.0" +s3transfer = ">=0.5.0,<0.6.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.20.94" +version = "1.23.11" description = "Low-level, data-driven core of boto 3." category = "main" optional = false -python-versions = ">= 2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">= 3.6" [package.dependencies] jmespath = ">=0.7.1,<1.0.0" @@ -165,11 +169,11 @@ python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.11.15)"] +crt = ["awscrt (==0.12.5)"] [[package]] name = "certifi" -version = "2021.5.30" +version = "2021.10.8" description = "Python package for providing Mozilla's CA Bundle." category = "dev" optional = false @@ -177,7 +181,7 @@ python-versions = "*" [[package]] name = "cffi" -version = "1.14.5" +version = "1.15.0" description = "Foreign Function Interface for Python calling C code." category = "main" optional = false @@ -188,23 +192,26 @@ pycparser = "*" [[package]] name = "cfgv" -version = "3.3.0" +version = "3.3.1" description = "Validate configuration and produce human readable error messages." category = "main" optional = true python-versions = ">=3.6.1" [[package]] -name = "chardet" -version = "4.0.0" -description = "Universal encoding detector for Python 2 and 3" +name = "charset-normalizer" +version = "2.0.7" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] [[package]] name = "click" -version = "8.0.1" +version = "8.0.3" description = "Composable command line interface toolkit" category = "main" optional = false @@ -213,6 +220,20 @@ python-versions = ">=3.6" [package.dependencies] colorama = {version = "*", markers = "platform_system == \"Windows\""} +[[package]] +name = "click-completion" +version = "0.5.2" +description = "Fish, Bash, Zsh and PowerShell completion for Click" +category = "main" +optional = true +python-versions = "*" + +[package.dependencies] +click = "*" +jinja2 = "*" +shellingham = "*" +six = "*" + [[package]] name = "colorama" version = "0.4.4" @@ -221,9 +242,20 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "coverage" +version = "5.5" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +toml = ["toml"] + [[package]] name = "cryptography" -version = "3.4.7" +version = "36.0.0" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." category = "dev" optional = false @@ -234,15 +266,34 @@ cffi = ">=1.12" [package.extras] docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["doc8", "pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] -sdist = ["setuptools-rust (>=0.11.4)"] +sdist = ["setuptools_rust (>=0.11.4)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["pytest (>=6.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] +test = ["pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] + +[[package]] +name = "dacite" +version = "1.6.0" +description = "Simple creation of data classes from dictionaries." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +dev = ["pytest (>=5)", "pytest-cov", "coveralls", "black", "mypy", "pylint"] + +[[package]] +name = "debugpy" +version = "1.5.1" +description = "An implementation of the Debug Adapter Protocol for Python" +category = "main" +optional = true +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" [[package]] name = "decorator" -version = "5.0.9" +version = "5.1.0" description = "Decorators for Humans" category = "main" optional = true @@ -258,7 +309,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "distlib" -version = "0.3.2" +version = "0.3.3" description = "Distribution utilities" category = "main" optional = false @@ -274,24 +325,28 @@ python-versions = ">=2.7" [[package]] name = "filelock" -version = "3.0.12" +version = "3.4.0" description = "A platform independent file lock." 
category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" + +[package.extras] +docs = ["furo (>=2021.8.17b43)", "sphinx (>=4.1)", "sphinx-autodoc-typehints (>=1.12)"] +testing = ["covdefaults (>=1.2.0)", "coverage (>=4)", "pytest (>=4)", "pytest-cov", "pytest-timeout (>=1.4.2)"] [[package]] name = "flake8" -version = "3.9.2" +version = "4.0.1" description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.6" [package.dependencies] mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.7.0,<2.8.0" -pyflakes = ">=2.3.0,<2.4.0" +pycodestyle = ">=2.8.0,<2.9.0" +pyflakes = ">=2.4.0,<2.5.0" [[package]] name = "flakehell" @@ -315,7 +370,7 @@ dev = ["dlint", "flake8-2020", "flake8-aaa", "flake8-absolute-import", "flake8-a [[package]] name = "ghp-import" -version = "2.0.1" +version = "2.0.2" description = "Copy your docs directly to the gh-pages branch." category = "main" optional = true @@ -325,29 +380,30 @@ python-versions = "*" python-dateutil = ">=2.8.1" [package.extras] -dev = ["twine", "markdown", "flake8"] +dev = ["twine", "markdown", "flake8", "wheel"] [[package]] name = "gitdb" -version = "4.0.7" +version = "4.0.9" description = "Git Object Database" category = "main" optional = true -python-versions = ">=3.4" +python-versions = ">=3.6" [package.dependencies] -smmap = ">=3.0.1,<5" +smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.17" -description = "Python Git Library" +version = "3.1.24" +description = "GitPython is a python library used to interact with Git repositories" category = "main" optional = true -python-versions = ">=3.5" +python-versions = ">=3.7" [package.dependencies] gitdb = ">=4.0.1,<5" +typing-extensions = {version = ">=3.7.4.3", markers = "python_version < \"3.10\""} [[package]] name = "graphviz" @@ -364,26 +420,26 @@ test = ["mock (>=3)", "pytest (>=4)", "pytest-mock (>=2)", "pytest-cov"] [[package]] name = "identify" -version = "2.2.10" +version = "2.4.0" description = "File identification library for Python" category = "main" optional = true python-versions = ">=3.6.1" [package.extras] -license = ["editdistance-s"] +license = ["ukkonen"] [[package]] name = "idna" -version = "2.10" +version = "3.3" description = "Internationalized Domain Names in Applications (IDNA)" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.5" [[package]] name = "importlib-metadata" -version = "4.5.0" +version = "4.8.2" description = "Read metadata from Python packages" category = "main" optional = true @@ -394,7 +450,8 @@ zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +perf = ["ipython"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] name = "iniconfig" @@ -419,25 +476,27 @@ toml = {version = ">=0.10.2", markers = "python_version > \"3.6\""} [[package]] name = "ipykernel" -version = "5.5.5" +version = "6.5.1" description = "IPython Kernel for 
Jupyter" category = "main" optional = true -python-versions = ">=3.5" +python-versions = ">=3.7" [package.dependencies] appnope = {version = "*", markers = "platform_system == \"Darwin\""} -ipython = ">=5.0.0" -jupyter-client = "*" -tornado = ">=4.2" -traitlets = ">=4.1.0" +debugpy = ">=1.0.0,<2.0" +ipython = ">=7.23.1" +jupyter-client = "<8.0" +matplotlib-inline = ">=0.1.0,<0.2.0" +tornado = ">=4.2,<7.0" +traitlets = ">=5.1.0,<6.0" [package.extras] -test = ["pytest (!=5.3.4)", "pytest-cov", "flaky", "nose", "jedi (<=0.17.2)"] +test = ["pytest (!=5.3.4)", "pytest-cov", "flaky", "nose", "ipyparallel"] [[package]] name = "ipython" -version = "7.24.1" +version = "7.29.0" description = "IPython: Productive Interactive Computing" category = "main" optional = true @@ -477,20 +536,21 @@ python-versions = "*" [[package]] name = "isort" -version = "5.8.0" +version = "5.10.1" description = "A Python utility / library to sort Python imports." category = "dev" optional = false -python-versions = ">=3.6,<4.0" +python-versions = ">=3.6.1,<4.0" [package.extras] pipfile_deprecated_finder = ["pipreqs", "requirementslib"] requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] +plugins = ["setuptools"] [[package]] name = "jedi" -version = "0.18.0" +version = "0.18.1" description = "An autocompletion tool for Python that can be used for text editors." category = "main" optional = true @@ -501,11 +561,11 @@ parso = ">=0.8.0,<0.9.0" [package.extras] qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<6.0.0)"] +testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] name = "jinja2" -version = "3.0.1" +version = "3.0.3" description = "A very fast and expressive template engine." 
category = "main" optional = false @@ -527,30 +587,30 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "jsonschema" -version = "3.2.0" +version = "4.2.1" description = "An implementation of JSON Schema validation for Python" category = "main" optional = true -python-versions = "*" +python-versions = ">=3.7" [package.dependencies] attrs = ">=17.4.0" -pyrsistent = ">=0.14.0" -six = ">=1.11.0" +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" [package.extras] -format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format_nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] [[package]] name = "jupyter-client" -version = "6.2.0" +version = "7.1.0" description = "Jupyter protocol implementation and client libraries" category = "main" optional = true python-versions = ">=3.6.1" [package.dependencies] +entrypoints = "*" jupyter-core = ">=4.6.0" nest-asyncio = ">=1.5" python-dateutil = ">=2.1" @@ -559,19 +619,19 @@ tornado = ">=4.1" traitlets = "*" [package.extras] -doc = ["sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] -test = ["async-generator", "ipykernel", "ipython", "mock", "pytest-asyncio", "pytest-timeout", "pytest", "mypy", "pre-commit", "jedi (<0.18)"] +doc = ["myst-parser", "sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +test = ["codecov", "coverage", "ipykernel", "ipython", "mock", "mypy", "pre-commit", "pytest", "pytest-asyncio", "pytest-cov", "pytest-timeout", "jedi (<0.18)"] [[package]] name = "jupyter-core" -version = "4.7.1" +version = "4.9.1" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "main" optional = true python-versions = ">=3.6" [package.dependencies] -pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\""} +pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} traitlets = "*" [[package]] @@ -595,12 +655,15 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] name = "markdown" -version = "3.3.4" +version = "3.3.6" description = "Python implementation of Markdown." category = "main" optional = true python-versions = ">=3.6" +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + [package.extras] testing = ["coverage", "pyyaml"] @@ -614,7 +677,7 @@ python-versions = ">=3.6" [[package]] name = "matplotlib-inline" -version = "0.1.2" +version = "0.1.3" description = "Inline Matplotlib backend for Jupyter" category = "main" optional = true @@ -649,7 +712,7 @@ python-versions = "*" [[package]] name = "mkdocs" -version = "1.2.1" +version = "1.2.3" description = "Project documentation with Markdown." category = "main" optional = true @@ -672,7 +735,7 @@ i18n = ["babel (>=2.9.0)"] [[package]] name = "mkdocs-git-revision-date-localized-plugin" -version = "0.9.2" +version = "0.9.3" description = "Mkdocs plugin that enables displaying the localized date of the last git modification of a markdown file." 
category = "main" optional = true @@ -685,7 +748,7 @@ mkdocs = ">=1.0" [[package]] name = "mkdocs-macros-plugin" -version = "0.5.5" +version = "0.5.12" description = "Unleash the power of MkDocs with macros and variables" category = "main" optional = true @@ -694,51 +757,40 @@ python-versions = ">=3.5" [package.dependencies] jinja2 = "*" mkdocs = ">=0.17" -mkdocs-material = ">=6.2" python-dateutil = "*" pyyaml = "*" termcolor = "*" [package.extras] -test = ["mkdocs-macros-test", "mkdocs-material"] +test = ["mkdocs-macros-test", "mkdocs-material (>=6.2)", "mkdocs-include-markdown-plugin"] [[package]] name = "mkdocs-material" -version = "7.1.8" +version = "7.3.6" description = "A Material Design theme for MkDocs" category = "main" optional = true python-versions = "*" [package.dependencies] +jinja2 = ">=2.11.1" markdown = ">=3.2" -mkdocs = ">=1.1" +mkdocs = ">=1.2.3" mkdocs-material-extensions = ">=1.0" -Pygments = ">=2.4" -pymdown-extensions = ">=7.0" +pygments = ">=2.10" +pymdown-extensions = ">=9.0" [[package]] name = "mkdocs-material-extensions" -version = "1.0.1" +version = "1.0.3" description = "Extension pack for Python Markdown." category = "main" optional = true -python-versions = ">=3.5" - -[package.dependencies] -mkdocs-material = ">=5.0.0" - -[[package]] -name = "more-itertools" -version = "8.8.0" -description = "More routines for operating on iterables, beyond itertools" -category = "dev" -optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [[package]] name = "moto" -version = "2.0.9" +version = "2.2.16" description = "A library that allows your python tests to easily mock out the boto library" category = "dev" optional = false @@ -750,33 +802,31 @@ botocore = ">=1.12.201" cryptography = ">=3.3.1" Jinja2 = ">=2.10.1" MarkupSafe = "!=2.0.0a1" -more-itertools = "*" python-dateutil = ">=2.1,<3.0.0" pytz = "*" requests = ">=2.5" responses = ">=0.9.0" -six = ">1.9" werkzeug = "*" xmltodict = "*" [package.extras] -all = ["PyYAML (>=5.1)", "ecdsa (<0.15)", "docker (>=2.5.1)", "jsondiff (>=1.1.2)", "aws-xray-sdk (>=0.93,!=0.96)", "idna (>=2.5,<3)", "cfn-lint (>=0.4.0)", "python-jose[cryptography] (>=3.1.0,<3.3.0)", "decorator (<=4.4.2)", "sshpubkeys (==3.1.0)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "sshpubkeys (>=3.1.0)"] -apigateway = ["ecdsa (<0.15)", "python-jose[cryptography] (>=3.1.0,<3.3.0)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +all = ["PyYAML (>=5.1)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (<0.15)", "docker (>=2.5.1)", "jsondiff (>=1.1.2)", "aws-xray-sdk (>=0.93,!=0.96)", "idna (>=2.5,<4)", "cfn-lint (>=0.4.0)", "sshpubkeys (>=3.1.0)", "setuptools"] +apigateway = ["python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (<0.15)"] awslambda = ["docker (>=2.5.1)"] batch = ["docker (>=2.5.1)"] -cloudformation = ["docker (>=2.5.1)", "PyYAML (>=5.1)", "cfn-lint (>=0.4.0)", "decorator (<=4.4.2)"] -cognitoidp = ["ecdsa (<0.15)", "python-jose[cryptography] (>=3.1.0,<3.3.0)", "python-jose[cryptography] (>=3.1.0,<4.0.0)"] +cloudformation = ["docker (>=2.5.1)", "PyYAML (>=5.1)", "cfn-lint (>=0.4.0)"] +cognitoidp = ["python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (<0.15)"] +ds = ["sshpubkeys (>=3.1.0)"] dynamodb2 = ["docker (>=2.5.1)"] dynamodbstreams = ["docker (>=2.5.1)"] -ec2 = ["docker (>=2.5.1)", "sshpubkeys (==3.1.0)", "sshpubkeys (>=3.1.0)"] +ec2 = ["sshpubkeys (>=3.1.0)"] +efs = ["sshpubkeys (>=3.1.0)"] iotdata = ["jsondiff (>=1.1.2)"] +route53resolver = ["sshpubkeys (>=3.1.0)"] s3 = ["PyYAML (>=5.1)"] -server = ["PyYAML (>=5.1)", 
"ecdsa (<0.15)", "docker (>=2.5.1)", "jsondiff (>=1.1.2)", "aws-xray-sdk (>=0.93,!=0.96)", "idna (>=2.5,<3)", "cfn-lint (>=0.4.0)", "flask", "flask-cors", "python-jose[cryptography] (>=3.1.0,<3.3.0)", "decorator (<=4.4.2)", "sshpubkeys (==3.1.0)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "sshpubkeys (>=3.1.0)"] -ses = ["docker (>=2.5.1)"] -sns = ["docker (>=2.5.1)"] -sqs = ["docker (>=2.5.1)"] -ssm = ["docker (>=2.5.1)", "PyYAML (>=5.1)", "cfn-lint (>=0.4.0)", "decorator (<=4.4.2)"] -xray = ["aws-xray-sdk (>=0.93,!=0.96)"] +server = ["PyYAML (>=5.1)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (<0.15)", "docker (>=2.5.1)", "jsondiff (>=1.1.2)", "aws-xray-sdk (>=0.93,!=0.96)", "idna (>=2.5,<4)", "cfn-lint (>=0.4.0)", "sshpubkeys (>=3.1.0)", "setuptools", "flask", "flask-cors"] +ssm = ["PyYAML (>=5.1)", "dataclasses"] +xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] [[package]] name = "mypy-extensions" @@ -788,31 +838,30 @@ python-versions = "*" [[package]] name = "nbclient" -version = "0.5.3" +version = "0.5.9" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." category = "main" optional = true python-versions = ">=3.6.1" [package.dependencies] -async-generator = "*" jupyter-client = ">=6.1.5" nbformat = ">=5.0" nest-asyncio = "*" traitlets = ">=4.2" [package.extras] -dev = ["codecov", "coverage", "ipython", "ipykernel", "ipywidgets", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "tox", "bumpversion", "xmltodict", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)", "black"] +dev = ["codecov", "coverage", "ipython", "ipykernel", "ipywidgets", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "tox", "xmltodict", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)", "black"] sphinx = ["Sphinx (>=1.7)", "sphinx-book-theme", "mock", "moto", "myst-parser"] -test = ["codecov", "coverage", "ipython", "ipykernel", "ipywidgets", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "tox", "bumpversion", "xmltodict", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)", "black"] +test = ["codecov", "coverage", "ipython", "ipykernel", "ipywidgets", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "tox", "xmltodict", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)", "black"] [[package]] name = "nbconvert" -version = "6.0.7" +version = "6.3.0" description = "Converting Jupyter Notebooks" category = "main" optional = true -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] bleach = "*" @@ -827,14 +876,14 @@ nbformat = ">=4.4" pandocfilters = ">=1.4.1" pygments = ">=2.4.1" testpath = "*" -traitlets = ">=4.2" +traitlets = ">=5.0" [package.extras] -all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.2)", "tornado (>=4.0)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] +all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.6)", "tornado (>=4.0)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] docs = ["sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] serve = ["tornado (>=4.0)"] -test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.2)"] -webpdf = ["pyppeteer (==0.2.2)"] 
+test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.6)"] +webpdf = ["pyppeteer (==0.2.6)"] [[package]] name = "nbformat" @@ -872,7 +921,7 @@ python-versions = "*" [[package]] name = "notebook" -version = "6.4.0" +version = "6.4.6" description = "A web-based notebook environment for interactive computing" category = "main" optional = true @@ -887,9 +936,10 @@ jupyter-client = ">=5.3.4" jupyter-core = ">=4.6.1" nbconvert = "*" nbformat = "*" +nest-asyncio = ">=1.5" prometheus-client = "*" pyzmq = ">=17" -Send2Trash = ">=1.5.0" +Send2Trash = ">=1.8.0" terminado = ">=0.8.3" tornado = ">=6.1" traitlets = ">=4.2.1" @@ -901,18 +951,18 @@ test = ["pytest", "coverage", "requests", "nbval", "selenium", "pytest-cov", "re [[package]] name = "packaging" -version = "20.9" +version = "21.3" description = "Core utilities for Python packages" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.dependencies] -pyparsing = ">=2.0.2" +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pandocfilters" -version = "1.4.3" +version = "1.5.0" description = "Utilities for writing pandoc filters in python" category = "main" optional = true @@ -932,11 +982,11 @@ testing = ["docopt", "pytest (<6.0.0)"] [[package]] name = "pathspec" -version = "0.8.1" +version = "0.9.0" description = "Utility library for gitignore style pattern matching of file paths." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "pexpect" @@ -957,20 +1007,33 @@ category = "main" optional = true python-versions = "*" +[[package]] +name = "platformdirs" +version = "2.4.0" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] + [[package]] name = "pluggy" -version = "0.13.1" +version = "1.0.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.extras] dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "2.13.0" +version = "2.15.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "main" optional = true @@ -986,7 +1049,7 @@ virtualenv = ">=20.0.8" [[package]] name = "prometheus-client" -version = "0.11.0" +version = "0.12.0" description = "Python client for the Prometheus monitoring system." 
category = "main" optional = true @@ -997,11 +1060,11 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.18" +version = "3.0.22" description = "Library for building powerful interactive command lines in Python" category = "main" optional = true -python-versions = ">=3.6.1" +python-versions = ">=3.6.2" [package.dependencies] wcwidth = "*" @@ -1016,23 +1079,23 @@ python-versions = "*" [[package]] name = "py" -version = "1.10.0" +version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pycodestyle" -version = "2.7.0" +version = "2.8.0" description = "Python style guide checker" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pycparser" -version = "2.20" +version = "2.21" description = "C parser in Python" category = "main" optional = false @@ -1040,7 +1103,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pyflakes" -version = "2.3.1" +version = "2.4.0" description = "passive checker of Python programs" category = "dev" optional = false @@ -1048,7 +1111,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.9.0" +version = "2.10.0" description = "Pygments is a syntax highlighting package written in Python." category = "main" optional = false @@ -1064,22 +1127,24 @@ python-versions = "*" [[package]] name = "pylint" -version = "2.8.3" +version = "2.11.1" description = "python code static checker" category = "dev" optional = false python-versions = "~=3.6" [package.dependencies] -astroid = "2.5.6" +astroid = ">=2.8.0,<2.9" colorama = {version = "*", markers = "sys_platform == \"win32\""} isort = ">=4.2.5,<6" mccabe = ">=0.6,<0.7" +platformdirs = ">=2.2.0" toml = ">=0.7.1" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} [[package]] name = "pymdown-extensions" -version = "8.2" +version = "9.1" description = "Extension pack for Python Markdown." category = "main" optional = true @@ -1090,23 +1155,26 @@ Markdown = ">=3.2" [[package]] name = "pyparsing" -version = "2.4.7" +version = "3.0.6" description = "Python parsing module" category = "main" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.6" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pyrsistent" -version = "0.17.3" +version = "0.18.0" description = "Persistent/Functional/Immutable data structures" category = "main" optional = true -python-versions = ">=3.5" +python-versions = ">=3.6" [[package]] name = "pytest" -version = "6.2.4" +version = "6.2.5" description = "pytest: simple powerful testing with Python" category = "dev" optional = false @@ -1118,13 +1186,29 @@ attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<1.0.0a1" +pluggy = ">=0.12,<2.0" py = ">=1.8.2" toml = "*" [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +[[package]] +name = "pytest-cov" +version = "2.12.1" +description = "Pytest plugin for measuring coverage." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +coverage = ">=5.2.1" +pytest = ">=4.6" +toml = "*" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "six", "pytest-xdist", "virtualenv"] + [[package]] name = "pytest-custom-exit-code" version = "0.3.0" @@ -1138,7 +1222,7 @@ pytest = ">=4.0.2" [[package]] name = "python-dateutil" -version = "2.8.1" +version = "2.8.2" description = "Extensions to the standard Python datetime module" category = "main" optional = false @@ -1149,7 +1233,7 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2021.1" +version = "2021.3" description = "World timezone definitions, modern and historical" category = "main" optional = false @@ -1157,7 +1241,7 @@ python-versions = "*" [[package]] name = "pywin32" -version = "301" +version = "302" description = "Python for Window Extensions" category = "main" optional = true @@ -1165,7 +1249,7 @@ python-versions = "*" [[package]] name = "pywinpty" -version = "1.1.2" +version = "1.1.6" description = "Pseudo terminal support for Windows from Python." category = "main" optional = true @@ -1192,7 +1276,7 @@ pyyaml = "*" [[package]] name = "pyzmq" -version = "22.1.0" +version = "22.3.0" description = "Python bindings for 0MQ" category = "main" optional = true @@ -1204,7 +1288,7 @@ py = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "regex" -version = "2021.4.4" +version = "2021.11.10" description = "Alternative regular expression module, to replace re." category = "main" optional = false @@ -1212,25 +1296,25 @@ python-versions = "*" [[package]] name = "requests" -version = "2.25.1" +version = "2.26.0" description = "Python HTTP for Humans." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] certifi = ">=2017.4.17" -chardet = ">=3.0.2,<5" -idna = ">=2.5,<3" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} urllib3 = ">=1.21.1,<1.27" [package.extras] -security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] [[package]] name = "responses" -version = "0.13.3" +version = "0.16.0" description = "A utility library for mocking out the `requests` Python library." category = "dev" optional = false @@ -1242,15 +1326,15 @@ six = "*" urllib3 = ">=1.25.10" [package.extras] -tests = ["coverage (>=3.7.1,<6.0.0)", "pytest-cov", "pytest-localserver", "flake8", "pytest (>=4.6,<5.0)", "pytest (>=4.6)", "mypy"] +tests = ["coverage (>=3.7.1,<6.0.0)", "pytest-cov", "pytest-localserver", "flake8", "types-mock", "types-requests", "types-six", "pytest (>=4.6,<5.0)", "pytest (>=4.6)", "mypy"] [[package]] name = "s3transfer" -version = "0.4.2" +version = "0.5.0" description = "An Amazon S3 Transfer Manager" category = "main" optional = false -python-versions = "*" +python-versions = ">= 3.6" [package.dependencies] botocore = ">=1.12.36,<2.0a.0" @@ -1260,12 +1344,25 @@ crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] [[package]] name = "send2trash" -version = "1.5.0" +version = "1.8.0" description = "Send file to trash natively under Mac OS X, Windows and Linux." 
category = "main" optional = true python-versions = "*" +[package.extras] +nativelib = ["pyobjc-framework-cocoa", "pywin32"] +objc = ["pyobjc-framework-cocoa"] +win32 = ["pywin32"] + +[[package]] +name = "shellingham" +version = "1.4.0" +description = "Tool to Detect Surrounding Shell" +category = "main" +optional = true +python-versions = "!=3.0,!=3.1,!=3.2,!=3.3,>=2.6" + [[package]] name = "six" version = "1.16.0" @@ -1276,24 +1373,24 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "smmap" -version = "4.0.0" +version = "5.0.0" description = "A pure Python implementation of a sliding window memory map manager" category = "main" optional = true -python-versions = ">=3.5" +python-versions = ">=3.6" [[package]] name = "structlog" -version = "21.1.0" +version = "21.3.0" description = "Structured Logging for Python" category = "main" optional = false python-versions = ">=3.6" [package.extras] -dev = ["coverage", "freezegun (>=0.2.8)", "pretend", "pytest-asyncio", "pytest-randomly", "pytest (>=6.0)", "simplejson", "furo", "sphinx", "sphinx-toolbox", "twisted", "pre-commit"] -docs = ["furo", "sphinx", "sphinx-toolbox", "twisted"] -tests = ["coverage", "freezegun (>=0.2.8)", "pretend", "pytest-asyncio", "pytest-randomly", "pytest (>=6.0)", "simplejson"] +dev = ["pre-commit", "rich", "cogapp", "tomli", "coverage", "freezegun (>=0.2.8)", "pretend", "pytest-asyncio", "pytest (>=6.0)", "simplejson", "furo", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "twisted"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "sphinxcontrib-mermaid", "twisted"] +tests = ["coverage", "freezegun (>=0.2.8)", "pretend", "pytest-asyncio", "pytest (>=6.0)", "simplejson"] [[package]] name = "termcolor" @@ -1305,7 +1402,7 @@ python-versions = "*" [[package]] name = "terminado" -version = "0.10.1" +version = "0.12.1" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." category = "main" optional = true @@ -1338,6 +1435,14 @@ category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +[[package]] +name = "tomli" +version = "1.2.2" +description = "A lil' TOML parser" +category = "main" +optional = false +python-versions = ">=3.6" + [[package]] name = "tornado" version = "6.1" @@ -1348,7 +1453,7 @@ python-versions = ">= 3.5" [[package]] name = "tox" -version = "3.23.1" +version = "3.24.4" description = "tox is a generic virtualenv management and test command line tool" category = "dev" optional = false @@ -1370,7 +1475,7 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "psutil (>=5.6.1)", "pytes [[package]] name = "tox-poetry" -version = "0.4.0" +version = "0.4.1" description = "Tox poetry plugin" category = "dev" optional = false @@ -1386,21 +1491,26 @@ test = ["coverage", "pytest", "pycodestyle", "pylint"] [[package]] name = "traitlets" -version = "5.0.5" +version = "5.1.1" description = "Traitlets Python configuration system" category = "main" optional = true python-versions = ">=3.7" -[package.dependencies] -ipython-genutils = "*" - [package.extras] test = ["pytest"] +[[package]] +name = "typing-extensions" +version = "4.0.0" +description = "Backported and Experimental Type Hints for Python 3.6+" +category = "main" +optional = false +python-versions = ">=3.6" + [[package]] name = "urllib3" -version = "1.26.5" +version = "1.26.7" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false @@ -1413,32 +1523,33 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.4.7" +version = "20.10.0" description = "Virtual Python Environment builder" category = "main" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [package.dependencies] -appdirs = ">=1.4.3,<2" +"backports.entry-points-selectable" = ">=1.0.4" distlib = ">=0.3.1,<1" -filelock = ">=3.0.0,<4" +filelock = ">=3.2,<4" +platformdirs = ">=2,<3" six = ">=1.9.0,<2" [package.extras] -docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] +docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] [[package]] name = "watchdog" -version = "2.1.2" +version = "2.1.6" description = "Filesystem events monitoring" category = "main" optional = true python-versions = ">=3.6" [package.extras] -watchmedo = ["PyYAML (>=3.10)", "argh (>=0.24.1)"] +watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "wcwidth" @@ -1458,7 +1569,7 @@ python-versions = "*" [[package]] name = "werkzeug" -version = "2.0.1" +version = "2.0.2" description = "The comprehensive WSGI web application library." category = "dev" optional = false @@ -1469,11 +1580,11 @@ watchdog = ["watchdog"] [[package]] name = "wrapt" -version = "1.12.1" +version = "1.13.3" description = "Module for decorators, wrappers and monkey patching." 
category = "dev" optional = false -python-versions = "*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "xmltodict" @@ -1485,7 +1596,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "zipp" -version = "3.4.1" +version = "3.6.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "main" optional = true @@ -1493,54 +1604,39 @@ python-versions = ">=3.6" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [extras] -cli = ["click"] +cli = ["click", "click-completion"] devtools = ["ipdb", "notebook", "pre-commit"] docs = ["blacken-docs", "mkdocs", "mkdocs-git-revision-date-localized-plugin", "mkdocs-macros-plugin", "mkdocs-material"] [metadata] lock-version = "1.1" python-versions = ">=3.9,<4" -content-hash = "0db0fa5b821cd863d9e27750d07d92c15daf375494937d2e5c1d75db1a4a0c5a" +content-hash = "4eea8c38ff4e450fe20397c527191736369e5871f1e626d38643d7b454eae7d6" [metadata.files] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] appnope = [ {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"}, {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, ] argon2-cffi = [ - {file = "argon2-cffi-20.1.0.tar.gz", hash = "sha256:d8029b2d3e4b4cea770e9e5a0104dd8fa185c1724a0f01528ae4826a6d25f97d"}, - {file = "argon2_cffi-20.1.0-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:6ea92c980586931a816d61e4faf6c192b4abce89aa767ff6581e6ddc985ed003"}, - {file = "argon2_cffi-20.1.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:05a8ac07c7026542377e38389638a8a1e9b78f1cd8439cd7493b39f08dd75fbf"}, - {file = "argon2_cffi-20.1.0-cp27-cp27m-win32.whl", hash = "sha256:0bf066bc049332489bb2d75f69216416329d9dc65deee127152caeb16e5ce7d5"}, - {file = "argon2_cffi-20.1.0-cp27-cp27m-win_amd64.whl", hash = "sha256:57358570592c46c420300ec94f2ff3b32cbccd10d38bdc12dc6979c4a8484fbc"}, - {file = "argon2_cffi-20.1.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:7d455c802727710e9dfa69b74ccaab04568386ca17b0ad36350b622cd34606fe"}, - {file = "argon2_cffi-20.1.0-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:b160416adc0f012fb1f12588a5e6954889510f82f698e23ed4f4fa57f12a0647"}, - {file = "argon2_cffi-20.1.0-cp35-cp35m-win32.whl", hash = "sha256:9bee3212ba4f560af397b6d7146848c32a800652301843df06b9e8f68f0f7361"}, - {file = "argon2_cffi-20.1.0-cp35-cp35m-win_amd64.whl", hash = "sha256:392c3c2ef91d12da510cfb6f9bae52512a4552573a9e27600bdb800e05905d2b"}, - {file = "argon2_cffi-20.1.0-cp36-cp36m-win32.whl", hash = "sha256:ba7209b608945b889457f949cc04c8e762bed4fe3fec88ae9a6b7765ae82e496"}, - {file = "argon2_cffi-20.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:da7f0445b71db6d3a72462e04f36544b0de871289b0bc8a7cc87c0f5ec7079fa"}, - {file = "argon2_cffi-20.1.0-cp37-abi3-macosx_10_6_intel.whl", hash = 
"sha256:cc0e028b209a5483b6846053d5fd7165f460a1f14774d79e632e75e7ae64b82b"}, - {file = "argon2_cffi-20.1.0-cp37-cp37m-win32.whl", hash = "sha256:18dee20e25e4be86680b178b35ccfc5d495ebd5792cd00781548d50880fee5c5"}, - {file = "argon2_cffi-20.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6678bb047373f52bcff02db8afab0d2a77d83bde61cfecea7c5c62e2335cb203"}, - {file = "argon2_cffi-20.1.0-cp38-cp38-win32.whl", hash = "sha256:77e909cc756ef81d6abb60524d259d959bab384832f0c651ed7dcb6e5ccdbb78"}, - {file = "argon2_cffi-20.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:9dfd5197852530294ecb5795c97a823839258dfd5eb9420233c7cfedec2058f2"}, - {file = "argon2_cffi-20.1.0-cp39-cp39-win32.whl", hash = "sha256:e2db6e85c057c16d0bd3b4d2b04f270a7467c147381e8fd73cbbe5bc719832be"}, - {file = "argon2_cffi-20.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a84934bd818e14a17943de8099d41160da4a336bcc699bb4c394bbb9b94bd32"}, + {file = "argon2-cffi-21.1.0.tar.gz", hash = "sha256:f710b61103d1a1f692ca3ecbd1373e28aa5e545ac625ba067ff2feca1b2bb870"}, + {file = "argon2_cffi-21.1.0-cp35-abi3-macosx_10_14_x86_64.whl", hash = "sha256:217b4f0f853ccbbb5045242946ad2e162e396064575860141b71a85eb47e475a"}, + {file = "argon2_cffi-21.1.0-cp35-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:fa7e7d1fc22514a32b1761fdfa1882b6baa5c36bb3ef557bdd69e6fc9ba14a41"}, + {file = "argon2_cffi-21.1.0-cp35-abi3-win32.whl", hash = "sha256:e4d8f0ae1524b7b0372a3e574a2561cbdddb3fdb6c28b70a72868189bda19659"}, + {file = "argon2_cffi-21.1.0-cp35-abi3-win_amd64.whl", hash = "sha256:65213a9174320a1aee03fe826596e0620783966b49eb636955958b3074e87ff9"}, + {file = "argon2_cffi-21.1.0-pp36-pypy36_pp73-macosx_10_7_x86_64.whl", hash = "sha256:245f64a203012b144b7b8c8ea6d468cb02b37caa5afee5ba4a10c80599334f6a"}, + {file = "argon2_cffi-21.1.0-pp36-pypy36_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4ad152c418f7eb640eac41ac815534e6aa61d1624530b8e7779114ecfbf327f8"}, + {file = "argon2_cffi-21.1.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:bc513db2283c385ea4da31a2cd039c33380701f376f4edd12fe56db118a3b21a"}, + {file = "argon2_cffi-21.1.0-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:c7a7c8cc98ac418002090e4add5bebfff1b915ea1cb459c578cd8206fef10378"}, + {file = "argon2_cffi-21.1.0-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:165cadae5ac1e26644f5ade3bd9c18d89963be51d9ea8817bd671006d7909057"}, + {file = "argon2_cffi-21.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:566ffb581bbd9db5562327aee71b2eda24a1c15b23a356740abe3c011bbe0dcb"}, ] astroid = [ - {file = "astroid-2.5.6-py3-none-any.whl", hash = "sha256:4db03ab5fc3340cf619dbc25e42c2cc3755154ce6009469766d7143d1fc2ee4e"}, - {file = "astroid-2.5.6.tar.gz", hash = "sha256:8a398dfce302c13f14bab13e2b14fe385d32b73f4e4853b9bdfb64598baa1975"}, -] -async-generator = [ - {file = "async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"}, - {file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"}, + {file = "astroid-2.8.6-py3-none-any.whl", hash = "sha256:cd8326b424c971e7d87678609cf6275d22028afd37d6ac59c16d47f1245882f6"}, + {file = "astroid-2.8.6.tar.gz", hash = "sha256:5f6f75e45f15290e73b56f9dfde95b4bf96382284cde406ef4203e928335a495"}, ] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = 
"sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, @@ -1558,153 +1654,264 @@ backcall = [ {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, ] +"backports.entry-points-selectable" = [ + {file = "backports.entry_points_selectable-1.1.1-py2.py3-none-any.whl", hash = "sha256:7fceed9532a7aa2bd888654a7314f864a3c16a4e710b34a58cfc0f08114c663b"}, + {file = "backports.entry_points_selectable-1.1.1.tar.gz", hash = "sha256:914b21a479fde881635f7af5adc7f6e38d6b274be32269070c53b698c60d5386"}, +] black = [ - {file = "black-21.6b0-py3-none-any.whl", hash = "sha256:dfb8c5a069012b2ab1e972e7b908f5fb42b6bbabcba0a788b86dc05067c7d9c7"}, - {file = "black-21.6b0.tar.gz", hash = "sha256:dc132348a88d103016726fe360cb9ede02cecf99b76e3660ce6c596be132ce04"}, + {file = "black-21.11b1-py3-none-any.whl", hash = "sha256:802c6c30b637b28645b7fde282ed2569c0cd777dbe493a41b6a03c1d903f99ac"}, + {file = "black-21.11b1.tar.gz", hash = "sha256:a042adbb18b3262faad5aff4e834ff186bb893f95ba3a8013f09de1e5569def2"}, ] blacken-docs = [ - {file = "blacken_docs-1.10.0-py2.py3-none-any.whl", hash = "sha256:149197a0b17e83121fc10aca9eda1417728fdccebde930a6722f97d87ed30f4b"}, - {file = "blacken_docs-1.10.0.tar.gz", hash = "sha256:e2121c95bf2f8a3ebb3110776d276f850f63b8e5753773ba2b4d0f415d862f23"}, + {file = "blacken_docs-1.12.0-py2.py3-none-any.whl", hash = "sha256:a81e0abc9771521f445ee582f469c8ec2f5880c19c369d766bb151f79f642d7b"}, + {file = "blacken_docs-1.12.0.tar.gz", hash = "sha256:3e8138b22c33406cef5946058e535a8aca45cd64b8e7d392b3bd1329fc1f4af8"}, ] bleach = [ - {file = "bleach-3.3.0-py2.py3-none-any.whl", hash = "sha256:6123ddc1052673e52bab52cdc955bcb57a015264a1c57d37bea2f6b817af0125"}, - {file = "bleach-3.3.0.tar.gz", hash = "sha256:98b3170739e5e83dd9dc19633f074727ad848cbedb6026708c8ac2d3b697a433"}, + {file = "bleach-4.1.0-py2.py3-none-any.whl", hash = "sha256:4d2651ab93271d1129ac9cbc679f524565cc8a1b791909c4a51eac4446a15994"}, + {file = "bleach-4.1.0.tar.gz", hash = "sha256:0900d8b37eba61a802ee40ac0061f8c2b5dee29c1927dd1d233e075ebf5a71da"}, ] boto3 = [ - {file = "boto3-1.17.94-py2.py3-none-any.whl", hash = "sha256:6180272094030bda3ee5c242881892cd3d9d19c05cb513945f530e396c7de1e4"}, - {file = "boto3-1.17.94.tar.gz", hash = "sha256:95d814d16fe55ae55e1e4a3db248596f9647a0c42f4796c6e05be0bfaffb1830"}, + {file = "boto3-1.20.11-py3-none-any.whl", hash = "sha256:cd58563dd3f36d5909815752b12c80a2c510c051474f8296e28dbd3ef5634d65"}, + {file = "boto3-1.20.11.tar.gz", hash = "sha256:035191ad6c7e8aed972e1374f4e0ecb38767c497fd6c961e4ae33898b62f78fb"}, ] botocore = [ - {file = "botocore-1.20.94-py2.py3-none-any.whl", hash = "sha256:ba8a7951be535e25219a82dea15c30d7bdf0c51e7c1623c3306248493c1616ac"}, - {file = "botocore-1.20.94.tar.gz", hash = "sha256:60a382a5b2f7d77b1b575d54fba819097526e3fdd0f3004f4d1142d50af0d642"}, + {file = "botocore-1.23.11-py3-none-any.whl", hash = "sha256:133fa0837762587fb4e5da3fb61ac0b45495cd9fd2d2be7679ba64899da1f3ba"}, + {file = "botocore-1.23.11.tar.gz", hash = "sha256:497234f137810909289a600433cec5583ea8dc05a78b644653d76484138d78b9"}, ] certifi = [ - {file = "certifi-2021.5.30-py2.py3-none-any.whl", hash = "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"}, - {file = "certifi-2021.5.30.tar.gz", hash = "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"}, + {file = 
"certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, ] cffi = [ - {file = "cffi-1.14.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991"}, - {file = "cffi-1.14.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1"}, - {file = "cffi-1.14.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa"}, - {file = "cffi-1.14.5-cp27-cp27m-win32.whl", hash = "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3"}, - {file = "cffi-1.14.5-cp27-cp27m-win_amd64.whl", hash = "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5"}, - {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482"}, - {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6"}, - {file = "cffi-1.14.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045"}, - {file = "cffi-1.14.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa"}, - {file = "cffi-1.14.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406"}, - {file = "cffi-1.14.5-cp35-cp35m-win32.whl", hash = "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369"}, - {file = "cffi-1.14.5-cp35-cp35m-win_amd64.whl", hash = "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315"}, - {file = "cffi-1.14.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132"}, - {file = "cffi-1.14.5-cp36-cp36m-win32.whl", hash = "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53"}, - {file = "cffi-1.14.5-cp36-cp36m-win_amd64.whl", hash = "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813"}, - {file = "cffi-1.14.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49"}, - {file = "cffi-1.14.5-cp37-cp37m-win32.whl", hash = "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62"}, - {file = "cffi-1.14.5-cp37-cp37m-win_amd64.whl", hash = 
"sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4"}, - {file = "cffi-1.14.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827"}, - {file = "cffi-1.14.5-cp38-cp38-win32.whl", hash = "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e"}, - {file = "cffi-1.14.5-cp38-cp38-win_amd64.whl", hash = "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396"}, - {file = "cffi-1.14.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee"}, - {file = "cffi-1.14.5-cp39-cp39-win32.whl", hash = "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396"}, - {file = "cffi-1.14.5-cp39-cp39-win_amd64.whl", hash = "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d"}, - {file = "cffi-1.14.5.tar.gz", hash = "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c"}, + {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, + {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, + {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, + {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, + {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, + {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, + {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, + {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, + {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, + {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, + {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, + {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, + {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, + {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, + {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, + {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, + {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, ] cfgv = [ - {file = "cfgv-3.3.0-py2.py3-none-any.whl", hash = "sha256:b449c9c6118fe8cca7fa5e00b9ec60ba08145d281d52164230a69211c5d597a1"}, - {file = "cfgv-3.3.0.tar.gz", hash = "sha256:9e600479b3b99e8af981ecdfc80a0296104ee610cab48a5ae4ffd0b668650eb1"}, + {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, + {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, ] -chardet = [ - {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, - {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, +charset-normalizer = [ + {file = "charset-normalizer-2.0.7.tar.gz", hash = "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0"}, + {file = "charset_normalizer-2.0.7-py3-none-any.whl", hash = "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b"}, ] click = [ - {file = "click-8.0.1-py3-none-any.whl", hash = "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6"}, - {file = "click-8.0.1.tar.gz", hash = 
"sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a"}, + {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, + {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, +] +click-completion = [ + {file = "click-completion-0.5.2.tar.gz", hash = "sha256:5bf816b81367e638a190b6e91b50779007d14301b3f9f3145d68e3cade7bce86"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] +coverage = [ + {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, + {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, + {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, + {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, + {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, + {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, + {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, + {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, + {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, + {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, + {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, + {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, + {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = 
"sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, + {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, + {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, + {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, + {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, + {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, + {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, + {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, + {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, + {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, + {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, + {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, + {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, + {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, + {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, + {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, + {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, + {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, + {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = 
"sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, + {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, + {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, + {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, + {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, + {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, + {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, +] cryptography = [ - {file = "cryptography-3.4.7-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:3d8427734c781ea5f1b41d6589c293089704d4759e34597dce91014ac125aad1"}, - {file = "cryptography-3.4.7-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:8e56e16617872b0957d1c9742a3f94b43533447fd78321514abbe7db216aa250"}, - {file = "cryptography-3.4.7-cp36-abi3-manylinux2010_x86_64.whl", hash = "sha256:37340614f8a5d2fb9aeea67fd159bfe4f5f4ed535b1090ce8ec428b2f15a11f2"}, - {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_aarch64.whl", hash = "sha256:240f5c21aef0b73f40bb9f78d2caff73186700bf1bc6b94285699aff98cc16c6"}, - {file = "cryptography-3.4.7-cp36-abi3-manylinux2014_x86_64.whl", hash = "sha256:1e056c28420c072c5e3cb36e2b23ee55e260cb04eee08f702e0edfec3fb51959"}, - {file = "cryptography-3.4.7-cp36-abi3-win32.whl", hash = "sha256:0f1212a66329c80d68aeeb39b8a16d54ef57071bf22ff4e521657b27372e327d"}, - {file = "cryptography-3.4.7-cp36-abi3-win_amd64.whl", hash = "sha256:de4e5f7f68220d92b7637fc99847475b59154b7a1b3868fb7385337af54ac9ca"}, - {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:26965837447f9c82f1855e0bc8bc4fb910240b6e0d16a664bb722df3b5b06873"}, - {file = "cryptography-3.4.7-pp36-pypy36_pp73-manylinux2014_x86_64.whl", hash = "sha256:eb8cc2afe8b05acbd84a43905832ec78e7b3873fb124ca190f574dca7389a87d"}, - {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:7ec5d3b029f5fa2b179325908b9cd93db28ab7b85bb6c1db56b10e0b54235177"}, - {file = "cryptography-3.4.7-pp37-pypy37_pp73-manylinux2014_x86_64.whl", hash = "sha256:ee77aa129f481be46f8d92a1a7db57269a2f23052d5f2433b4621bb457081cc9"}, - {file = "cryptography-3.4.7.tar.gz", hash = "sha256:3d10de8116d25649631977cb37da6cbdd2d6fa0e0281d014a5b7d337255ca713"}, + {file = "cryptography-36.0.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:9511416e85e449fe1de73f7f99b21b3aa04fba4c4d335d30c486ba3756e3a2a6"}, + {file = "cryptography-36.0.0-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:97199a13b772e74cdcdb03760c32109c808aff7cd49c29e9cf4b7754bb725d1d"}, + {file = "cryptography-36.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:494106e9cd945c2cadfce5374fa44c94cfadf01d4566a3b13bb487d2e6c7959e"}, + {file = "cryptography-36.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6fbbbb8aab4053fa018984bb0e95a16faeb051dd8cca15add2a27e267ba02b58"}, + {file = "cryptography-36.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = 
"sha256:684993ff6f67000a56454b41bdc7e015429732d65a52d06385b6e9de6181c71e"}, + {file = "cryptography-36.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c702855cd3174666ef0d2d13dcc879090aa9c6c38f5578896407a7028f75b9f"}, + {file = "cryptography-36.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d91bc9f535599bed58f6d2e21a2724cb0c3895bf41c6403fe881391d29096f1d"}, + {file = "cryptography-36.0.0-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:b17d83b3d1610e571fedac21b2eb36b816654d6f7496004d6a0d32f99d1d8120"}, + {file = "cryptography-36.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8982c19bb90a4fa2aad3d635c6d71814e38b643649b4000a8419f8691f20ac44"}, + {file = "cryptography-36.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:24469d9d33217ffd0ce4582dfcf2a76671af115663a95328f63c99ec7ece61a4"}, + {file = "cryptography-36.0.0-cp36-abi3-win32.whl", hash = "sha256:f6a5a85beb33e57998dc605b9dbe7deaa806385fdf5c4810fb849fcd04640c81"}, + {file = "cryptography-36.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:2deab5ec05d83ddcf9b0916319674d3dae88b0e7ee18f8962642d3cde0496568"}, + {file = "cryptography-36.0.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2049f8b87f449fc6190350de443ee0c1dd631f2ce4fa99efad2984de81031681"}, + {file = "cryptography-36.0.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a776bae1629c8d7198396fd93ec0265f8dd2341c553dc32b976168aaf0e6a636"}, + {file = "cryptography-36.0.0-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:aa94d617a4cd4cdf4af9b5af65100c036bce22280ebb15d8b5262e8273ebc6ba"}, + {file = "cryptography-36.0.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:5c49c9e8fb26a567a2b3fa0343c89f5d325447956cc2fc7231c943b29a973712"}, + {file = "cryptography-36.0.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ef216d13ac8d24d9cd851776662f75f8d29c9f2d05cdcc2d34a18d32463a9b0b"}, + {file = "cryptography-36.0.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:231c4a69b11f6af79c1495a0e5a85909686ea8db946935224b7825cfb53827ed"}, + {file = "cryptography-36.0.0-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:f92556f94e476c1b616e6daec5f7ddded2c082efa7cee7f31c7aeda615906ed8"}, + {file = "cryptography-36.0.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d73e3a96c38173e0aa5646c31bf8473bc3564837977dd480f5cbeacf1d7ef3a3"}, + {file = "cryptography-36.0.0.tar.gz", hash = "sha256:52f769ecb4ef39865719aedc67b4b7eae167bafa48dbc2a26dd36fa56460507f"}, +] +dacite = [ + {file = "dacite-1.6.0-py3-none-any.whl", hash = "sha256:4331535f7aabb505c732fa4c3c094313fc0a1d5ea19907bf4726a7819a68b93f"}, + {file = "dacite-1.6.0.tar.gz", hash = "sha256:d48125ed0a0352d3de9f493bf980038088f45f3f9d7498f090b50a847daaa6df"}, +] +debugpy = [ + {file = "debugpy-1.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:70b422c63a833630c33e3f9cdbd9b6971f8c5afd452697e464339a21bbe862ba"}, + {file = "debugpy-1.5.1-cp310-cp310-win32.whl", hash = "sha256:3a457ad9c0059a21a6c7d563c1f18e924f5cf90278c722bd50ede6f56b77c7fe"}, + {file = "debugpy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:5d76a4fd028d8009c3faf1185b4b78ceb2273dd2499447664b03939e0368bb90"}, + {file = "debugpy-1.5.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:16db27b4b91991442f91d73604d32080b30de655aca9ba821b1972ea8171021b"}, + {file = 
"debugpy-1.5.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2b073ad5e8d8c488fbb6a116986858bab0c9c4558f28deb8832c7a5a27405bd6"}, + {file = "debugpy-1.5.1-cp36-cp36m-win32.whl", hash = "sha256:318f81f37341e4e054b4267d39896b73cddb3612ca13b39d7eea45af65165e1d"}, + {file = "debugpy-1.5.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b5b3157372e0e0a1297a8b6b5280bcf1d35a40f436c7973771c972726d1e32d5"}, + {file = "debugpy-1.5.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1ec3a086e14bba6c472632025b8fe5bdfbaef2afa1ebd5c6615ce6ed8d89bc67"}, + {file = "debugpy-1.5.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:26fbe53cca45a608679094791ce587b6e2798acd1d4777a8b303b07622e85182"}, + {file = "debugpy-1.5.1-cp37-cp37m-win32.whl", hash = "sha256:d876db8c312eeb02d85611e0f696abe66a2c1515e6405943609e725d5ff36f2a"}, + {file = "debugpy-1.5.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4404a62fb5332ea5c8c9132290eef50b3a0ba38cecacad5529e969a783bcbdd7"}, + {file = "debugpy-1.5.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f3a3dca9104aa14fd4210edcce6d9ce2b65bd9618c0b222135a40b9d6e2a9eeb"}, + {file = "debugpy-1.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2df2c373e85871086bd55271c929670cd4e1dba63e94a08d442db830646203b"}, + {file = "debugpy-1.5.1-cp38-cp38-win32.whl", hash = "sha256:82f5f9ce93af6861a0713f804e62ab390bb12a17f113153e47fea8bbb1dfbe36"}, + {file = "debugpy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:17a25ce9d7714f92fc97ef00cc06269d7c2b163094990ada30156ed31d9a5030"}, + {file = "debugpy-1.5.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:01e98c594b3e66d529e40edf314f849cd1a21f7a013298df58cd8e263bf8e184"}, + {file = "debugpy-1.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f73988422b17f071ad3c4383551ace1ba5ed810cbab5f9c362783d22d40a08dc"}, + {file = "debugpy-1.5.1-cp39-cp39-win32.whl", hash = "sha256:23df67fc56d59e386c342428a7953c2c06cc226d8525b11319153e96afb65b0c"}, + {file = "debugpy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:a2aa64f6d2ca7ded8a7e8a4e7cae3bc71866b09876b7b05cecad231779cb9156"}, + {file = "debugpy-1.5.1-py2.py3-none-any.whl", hash = "sha256:194f95dd3e84568b5489aab5689a3a2c044e8fdc06f1890b8b4f70b6b89f2778"}, + {file = "debugpy-1.5.1.zip", hash = "sha256:d2b09e91fbd1efa4f4fda121d49af89501beda50c18ed7499712c71a4bf3452e"}, ] decorator = [ - {file = "decorator-5.0.9-py3-none-any.whl", hash = "sha256:6e5c199c16f7a9f0e3a61a4a54b3d27e7dad0dbdde92b944426cb20914376323"}, - {file = "decorator-5.0.9.tar.gz", hash = "sha256:72ecfba4320a893c53f9706bebb2d55c270c1e51a28789361aa93e4a21319ed5"}, + {file = "decorator-5.1.0-py3-none-any.whl", hash = "sha256:7b12e7c3c6ab203a29e157335e9122cb03de9ab7264b137594103fd4a683b374"}, + {file = "decorator-5.1.0.tar.gz", hash = "sha256:e59913af105b9860aa2c8d3272d9de5a56a4e608db9a2f167a8480b323d529a7"}, ] defusedxml = [ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, ] distlib = [ - {file = "distlib-0.3.2-py2.py3-none-any.whl", hash = "sha256:23e223426b28491b1ced97dc3bbe183027419dfc7982b4fa2f05d5f3ff10711c"}, - {file = "distlib-0.3.2.zip", hash = 
"sha256:106fef6dc37dd8c0e2c0a60d3fca3e77460a48907f335fa28420463a6f799736"}, + {file = "distlib-0.3.3-py2.py3-none-any.whl", hash = "sha256:c8b54e8454e5bf6237cc84c20e8264c3e991e824ef27e8f1e81049867d861e31"}, + {file = "distlib-0.3.3.zip", hash = "sha256:d982d0751ff6eaaab5e2ec8e691d949ee80eddf01a62eaa96ddb11531fe16b05"}, ] entrypoints = [ {file = "entrypoints-0.3-py2.py3-none-any.whl", hash = "sha256:589f874b313739ad35be6e0cd7efde2a4e9b6fea91edcc34e58ecbb8dbe56d19"}, {file = "entrypoints-0.3.tar.gz", hash = "sha256:c70dd71abe5a8c85e55e12c19bd91ccfeec11a6e99044204511f9ed547d48451"}, ] filelock = [ - {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, - {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, + {file = "filelock-3.4.0-py3-none-any.whl", hash = "sha256:2e139a228bcf56dd8b2274a65174d005c4a6b68540ee0bdbb92c76f43f29f7e8"}, + {file = "filelock-3.4.0.tar.gz", hash = "sha256:93d512b32a23baf4cac44ffd72ccf70732aeff7b8050fcaf6d3ec406d954baf4"}, ] flake8 = [ - {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, - {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, + {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, + {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, ] flakehell = [ {file = "flakehell-0.9.0-py3-none-any.whl", hash = "sha256:48a3a9b46136240e52b3b32a78a0826c45f6dcf7d980c30f758c1db5b1439c0b"}, {file = "flakehell-0.9.0.tar.gz", hash = "sha256:208836d8d24194d50cfa4c1fc99f681f3c537cc232edcd06455abc2971460893"}, ] ghp-import = [ - {file = "ghp-import-2.0.1.tar.gz", hash = "sha256:753de2eace6e0f7d4edfb3cce5e3c3b98cd52aadb80163303d1d036bda7b4483"}, + {file = "ghp-import-2.0.2.tar.gz", hash = "sha256:947b3771f11be850c852c64b561c600fdddf794bab363060854c1ee7ad05e071"}, + {file = "ghp_import-2.0.2-py3-none-any.whl", hash = "sha256:5f8962b30b20652cdffa9c5a9812f7de6bcb56ec475acac579807719bf242c46"}, ] gitdb = [ - {file = "gitdb-4.0.7-py3-none-any.whl", hash = "sha256:6c4cc71933456991da20917998acbe6cf4fb41eeaab7d6d67fbc05ecd4c865b0"}, - {file = "gitdb-4.0.7.tar.gz", hash = "sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005"}, + {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, + {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, ] gitpython = [ - {file = "GitPython-3.1.17-py3-none-any.whl", hash = "sha256:29fe82050709760081f588dd50ce83504feddbebdc4da6956d02351552b1c135"}, - {file = "GitPython-3.1.17.tar.gz", hash = "sha256:ee24bdc93dce357630764db659edaf6b8d664d4ff5447ccfeedd2dc5c253f41e"}, + {file = "GitPython-3.1.24-py3-none-any.whl", hash = "sha256:dc0a7f2f697657acc8d7f89033e8b1ea94dd90356b2983bca89dc8d2ab3cc647"}, + {file = "GitPython-3.1.24.tar.gz", hash = "sha256:df83fdf5e684fef7c6ee2c02fc68a5ceb7e7e759d08b694088d0cacb4eba59e5"}, ] graphviz = [ {file = "graphviz-0.16-py2.py3-none-any.whl", hash = "sha256:3cad5517c961090dfc679df6402a57de62d97703e2880a1a46147bb0dc1639eb"}, {file = "graphviz-0.16.zip", hash = "sha256:d2d25af1c199cad567ce4806f0449cb74eb30cf451fd7597251e1da099ac6e57"}, ] identify = [ - {file = 
"identify-2.2.10-py2.py3-none-any.whl", hash = "sha256:18d0c531ee3dbc112fa6181f34faa179de3f57ea57ae2899754f16a7e0ff6421"}, - {file = "identify-2.2.10.tar.gz", hash = "sha256:5b41f71471bc738e7b586308c3fca172f78940195cb3bf6734c1e66fdac49306"}, + {file = "identify-2.4.0-py2.py3-none-any.whl", hash = "sha256:eba31ca80258de6bb51453084bff4a923187cd2193b9c13710f2516ab30732cc"}, + {file = "identify-2.4.0.tar.gz", hash = "sha256:a33ae873287e81651c7800ca309dc1f84679b763c9c8b30680e16fbfa82f0107"}, ] idna = [ - {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, - {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.5.0-py3-none-any.whl", hash = "sha256:833b26fb89d5de469b24a390e9df088d4e52e4ba33b01dc5e0e4f41b81a16c00"}, - {file = "importlib_metadata-4.5.0.tar.gz", hash = "sha256:b142cc1dd1342f31ff04bb7d022492b09920cb64fed867cd3ea6f80fe3ebd139"}, + {file = "importlib_metadata-4.8.2-py3-none-any.whl", hash = "sha256:53ccfd5c134223e497627b9815d5030edf77d2ed573922f7a0b8f8bb81a1c100"}, + {file = "importlib_metadata-4.8.2.tar.gz", hash = "sha256:75bdec14c397f528724c1bfd9709d660b33a4d2e77387a3358f20b848bb5e5fb"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, @@ -1714,44 +1921,44 @@ ipdb = [ {file = "ipdb-0.13.9.tar.gz", hash = "sha256:951bd9a64731c444fd907a5ce268543020086a697f6be08f7cc2c9a752a278c5"}, ] ipykernel = [ - {file = "ipykernel-5.5.5-py3-none-any.whl", hash = "sha256:29eee66548ee7c2edb7941de60c0ccf0a7a8dd957341db0a49c5e8e6a0fcb712"}, - {file = "ipykernel-5.5.5.tar.gz", hash = "sha256:e976751336b51082a89fc2099fb7f96ef20f535837c398df6eab1283c2070884"}, + {file = "ipykernel-6.5.1-py3-none-any.whl", hash = "sha256:ff0cb4a67326d2f903b7d7a2e63719d082434b46f00536410bd4e3ad2b98f3b7"}, + {file = "ipykernel-6.5.1.tar.gz", hash = "sha256:dd27172bccbbcfef952991e49372e4c6fd1c14eed0df05ebd5b4335cb27a81a2"}, ] ipython = [ - {file = "ipython-7.24.1-py3-none-any.whl", hash = "sha256:d513e93327cf8657d6467c81f1f894adc125334ffe0e4ddd1abbb1c78d828703"}, - {file = "ipython-7.24.1.tar.gz", hash = "sha256:9bc24a99f5d19721fb8a2d1408908e9c0520a17fff2233ffe82620847f17f1b6"}, + {file = "ipython-7.29.0-py3-none-any.whl", hash = "sha256:a658beaf856ce46bc453366d5dc6b2ddc6c481efd3540cb28aa3943819caac9f"}, + {file = "ipython-7.29.0.tar.gz", hash = "sha256:4f69d7423a5a1972f6347ff233e38bbf4df6a150ef20fbb00c635442ac3060aa"}, ] ipython-genutils = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, ] isort = [ - {file = "isort-5.8.0-py3-none-any.whl", hash = "sha256:2bb1680aad211e3c9944dbce1d4ba09a989f04e238296c87fe2139faa26d655d"}, - {file = "isort-5.8.0.tar.gz", hash = "sha256:0a943902919f65c5684ac4e0154b1ad4fac6dcaa5d9f3426b732f1c8b5419be6"}, + {file = "isort-5.10.1-py3-none-any.whl", hash = "sha256:6f62d78e2f89b4500b080fe3a81690850cd254227f27f75c3a0c491a1f351ba7"}, + {file = "isort-5.10.1.tar.gz", hash = 
"sha256:e8443a5e7a020e9d7f97f1d7d9cd17c88bcb3bc7e218bf9cf5095fe550be2951"}, ] jedi = [ - {file = "jedi-0.18.0-py2.py3-none-any.whl", hash = "sha256:18456d83f65f400ab0c2d3319e48520420ef43b23a086fdc05dff34132f0fb93"}, - {file = "jedi-0.18.0.tar.gz", hash = "sha256:92550a404bad8afed881a137ec9a461fed49eca661414be45059329614ed0707"}, + {file = "jedi-0.18.1-py2.py3-none-any.whl", hash = "sha256:637c9635fcf47945ceb91cd7f320234a7be540ded6f3e99a50cb6febdfd1ba8d"}, + {file = "jedi-0.18.1.tar.gz", hash = "sha256:74137626a64a99c8eb6ae5832d99b3bdd7d29a3850fe2aa80a4126b2a7d949ab"}, ] jinja2 = [ - {file = "Jinja2-3.0.1-py3-none-any.whl", hash = "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"}, - {file = "Jinja2-3.0.1.tar.gz", hash = "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4"}, + {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, + {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, ] jmespath = [ {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, ] jsonschema = [ - {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, - {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, + {file = "jsonschema-4.2.1-py3-none-any.whl", hash = "sha256:2a0f162822a64d95287990481b45d82f096e99721c86534f48201b64ebca6e8c"}, + {file = "jsonschema-4.2.1.tar.gz", hash = "sha256:390713469ae64b8a58698bb3cbc3859abe6925b565a973f87323ef21b09a27a8"}, ] jupyter-client = [ - {file = "jupyter_client-6.2.0-py3-none-any.whl", hash = "sha256:9715152067e3f7ea3b56f341c9a0f9715c8c7cc316ee0eb13c3c84f5ca0065f5"}, - {file = "jupyter_client-6.2.0.tar.gz", hash = "sha256:e2ab61d79fbf8b56734a4c2499f19830fbd7f6fefb3e87868ef0545cb3c17eb9"}, + {file = "jupyter_client-7.1.0-py3-none-any.whl", hash = "sha256:64d93752d8cbfba0c1030c3335c3f0d9797cd1efac012652a14aac1653db11a3"}, + {file = "jupyter_client-7.1.0.tar.gz", hash = "sha256:a5f995a73cffb314ed262713ae6dfce53c6b8216cea9f332071b8ff44a6e1654"}, ] jupyter-core = [ - {file = "jupyter_core-4.7.1-py3-none-any.whl", hash = "sha256:8c6c0cac5c1b563622ad49321d5ec47017bd18b94facb381c6973a0486395f8e"}, - {file = "jupyter_core-4.7.1.tar.gz", hash = "sha256:79025cb3225efcd36847d0840f3fc672c0abd7afd0de83ba8a1d3837619122b4"}, + {file = "jupyter_core-4.9.1-py3-none-any.whl", hash = "sha256:1c091f3bbefd6f2a8782f2c1db662ca8478ac240e962ae2c66f0b87c818154ea"}, + {file = "jupyter_core-4.9.1.tar.gz", hash = "sha256:dce8a7499da5a53ae3afd5a9f4b02e5df1d57250cf48f3ad79da23b4778cd6fa"}, ] jupyterlab-pygments = [ {file = "jupyterlab_pygments-0.1.2-py2.py3-none-any.whl", hash = "sha256:abfb880fd1561987efaefcb2d2ac75145d2a5d0139b1876d5be806e32f630008"}, @@ -1782,8 +1989,8 @@ lazy-object-proxy = [ {file = "lazy_object_proxy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b"}, ] markdown = [ - {file = "Markdown-3.3.4-py3-none-any.whl", hash = "sha256:96c3ba1261de2f7547b46a00ea8463832c921d3f9d6aba3f255a6f71386db20c"}, - {file = "Markdown-3.3.4.tar.gz", hash = "sha256:31b5b491868dcc87d6c24b7e3d19a0d730d59d3e46f4eea6430a321bed387a49"}, + {file = 
"Markdown-3.3.6-py3-none-any.whl", hash = "sha256:9923332318f843411e9932237530df53162e29dc7a4e2b91e35764583c46c9a3"}, + {file = "Markdown-3.3.6.tar.gz", hash = "sha256:76df8ae32294ec39dcf89340382882dfa12975f87f45c3ed1ecdb1e8cefc7006"}, ] markupsafe = [ {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, @@ -1822,8 +2029,8 @@ markupsafe = [ {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, ] matplotlib-inline = [ - {file = "matplotlib-inline-0.1.2.tar.gz", hash = "sha256:f41d5ff73c9f5385775d5c0bc13b424535c8402fe70ea8210f93e11f3683993e"}, - {file = "matplotlib_inline-0.1.2-py3-none-any.whl", hash = "sha256:5cf1176f554abb4fa98cb362aa2b55c500147e4bdbb07e3fda359143e1da0811"}, + {file = "matplotlib-inline-0.1.3.tar.gz", hash = "sha256:a04bfba22e0d1395479f866853ec1ee28eea1485c1d69a6faf00dc3e24ff34ee"}, + {file = "matplotlib_inline-0.1.3-py3-none-any.whl", hash = "sha256:aed605ba3b72462d64d475a21a9296f400a19c4f74a31b59103d2a99ffd5aa5c"}, ] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, @@ -1838,44 +2045,40 @@ mistune = [ {file = "mistune-0.8.4.tar.gz", hash = "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e"}, ] mkdocs = [ - {file = "mkdocs-1.2.1-py3-none-any.whl", hash = "sha256:11141126e5896dd9d279b3e4814eb488e409a0990fb638856255020406a8e2e7"}, - {file = "mkdocs-1.2.1.tar.gz", hash = "sha256:6e0ea175366e3a50d334597b0bc042b8cebd512398cdd3f6f34842d0ef524905"}, + {file = "mkdocs-1.2.3-py3-none-any.whl", hash = "sha256:a1fa8c2d0c1305d7fc2b9d9f607c71778572a8b110fb26642aa00296c9e6d072"}, + {file = "mkdocs-1.2.3.tar.gz", hash = "sha256:89f5a094764381cda656af4298727c9f53dc3e602983087e1fe96ea1df24f4c1"}, ] mkdocs-git-revision-date-localized-plugin = [ - {file = "mkdocs-git-revision-date-localized-plugin-0.9.2.tar.gz", hash = "sha256:c15c76d5baa1f8f37e3a4146b9a6f1b00c5a361ea959ada0703fd9ec462afbe3"}, - {file = "mkdocs_git_revision_date_localized_plugin-0.9.2-py3-none-any.whl", hash = "sha256:d4b21eeb9f212efd314a05e771cf69f2bab1f51bdaa5c9955d09410a396a069b"}, + {file = "mkdocs-git-revision-date-localized-plugin-0.9.3.tar.gz", hash = "sha256:8f936bdf912fc1943bbb8c6027e7de6a348f9197bdb07f85a62ff1c3ee38da11"}, + {file = "mkdocs_git_revision_date_localized_plugin-0.9.3-py3-none-any.whl", hash = "sha256:a2ad799752862c0a089e9af12d8aeb2b17680d6a8502fa449eeecc638fc98722"}, ] mkdocs-macros-plugin = [ - {file = "mkdocs-macros-plugin-0.5.5.tar.gz", hash = "sha256:e0302836397c787505a7b1666cb41b9f4b34d894f816f040e4d738839e76cc73"}, - {file = "mkdocs_macros_plugin-0.5.5-py3-none-any.whl", hash = "sha256:def642db46bb42f2d7e532954dba4c5c0574079366927847da16e784b7986a9d"}, + {file = "mkdocs-macros-plugin-0.5.12.tar.gz", hash = "sha256:106aeb1ff3ba580e4884aec225ec6891ed8c93d083056088f8e66f3309fcd579"}, + {file = "mkdocs_macros_plugin-0.5.12-py3-none-any.whl", hash = "sha256:43b7ea0259e2e18a7fe52b73a548c5ea38bb99bc9ab37d026cbdd42e45769c65"}, ] mkdocs-material = [ - {file = "mkdocs-material-7.1.8.tar.gz", hash = "sha256:e555c66ece5eab7023c4733270dc7627280e707e5082dab278d6a7a4881d2435"}, - {file = "mkdocs_material-7.1.8-py2.py3-none-any.whl", hash = "sha256:08eaf9f77c6d026706397bae2c50d202cfe3a81ef984027b671b4acd365dfc5b"}, + {file = "mkdocs-material-7.3.6.tar.gz", hash = "sha256:1b1dbd8ef2508b358d93af55a5c5db3f141c95667fad802301ec621c40c7c217"}, + {file 
= "mkdocs_material-7.3.6-py2.py3-none-any.whl", hash = "sha256:1b6b3e9e09f922c2d7f1160fe15c8f43d4adc0d6fb81aa6ff0cbc7ef5b78ec75"}, ] mkdocs-material-extensions = [ - {file = "mkdocs-material-extensions-1.0.1.tar.gz", hash = "sha256:6947fb7f5e4291e3c61405bad3539d81e0b3cd62ae0d66ced018128af509c68f"}, - {file = "mkdocs_material_extensions-1.0.1-py3-none-any.whl", hash = "sha256:d90c807a88348aa6d1805657ec5c0b2d8d609c110e62b9dce4daf7fa981fa338"}, -] -more-itertools = [ - {file = "more-itertools-8.8.0.tar.gz", hash = "sha256:83f0308e05477c68f56ea3a888172c78ed5d5b3c282addb67508e7ba6c8f813a"}, - {file = "more_itertools-8.8.0-py3-none-any.whl", hash = "sha256:2cf89ec599962f2ddc4d568a05defc40e0a587fbc10d5989713638864c36be4d"}, + {file = "mkdocs-material-extensions-1.0.3.tar.gz", hash = "sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"}, + {file = "mkdocs_material_extensions-1.0.3-py3-none-any.whl", hash = "sha256:a82b70e533ce060b2a5d9eb2bc2e1be201cf61f901f93704b4acf6e3d5983a44"}, ] moto = [ - {file = "moto-2.0.9-py2.py3-none-any.whl", hash = "sha256:93f820d56c69ba4b34a54e3b97723c8202b33524a0bf8e47ccbab2eed9bb7f41"}, - {file = "moto-2.0.9.tar.gz", hash = "sha256:d46023795ce991c5cbbb3e68666133d57eb5d2b59805585f54eec7d40be8d05f"}, + {file = "moto-2.2.16-py2.py3-none-any.whl", hash = "sha256:9dd772426c56083a43c7d9c543183b644d70039df32e907d05bc7d7189a4dde1"}, + {file = "moto-2.2.16.tar.gz", hash = "sha256:fef15c831b1356b22beeb4f1b4694945597485bdc111039f1b304ab091141444"}, ] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] nbclient = [ - {file = "nbclient-0.5.3-py3-none-any.whl", hash = "sha256:e79437364a2376892b3f46bedbf9b444e5396cfb1bc366a472c37b48e9551500"}, - {file = "nbclient-0.5.3.tar.gz", hash = "sha256:db17271330c68c8c88d46d72349e24c147bb6f34ec82d8481a8f025c4d26589c"}, + {file = "nbclient-0.5.9-py3-none-any.whl", hash = "sha256:8a307be4129cce5f70eb83a57c3edbe45656623c31de54e38bb6fdfbadc428b3"}, + {file = "nbclient-0.5.9.tar.gz", hash = "sha256:99e46ddafacd0b861293bf246fed8540a184adfa3aa7d641f89031ec070701e0"}, ] nbconvert = [ - {file = "nbconvert-6.0.7-py3-none-any.whl", hash = "sha256:39e9f977920b203baea0be67eea59f7b37a761caa542abe80f5897ce3cf6311d"}, - {file = "nbconvert-6.0.7.tar.gz", hash = "sha256:cbbc13a86dfbd4d1b5dee106539de0795b4db156c894c2c5dc382062bbc29002"}, + {file = "nbconvert-6.3.0-py3-none-any.whl", hash = "sha256:8f23fbeabda4a500685d788ee091bf22cf34119304314304fb39f16e2fc32f37"}, + {file = "nbconvert-6.3.0.tar.gz", hash = "sha256:5e77d6203854944520105e38f2563a813a4a3708e8563aa598928a3b5ee1081a"}, ] nbformat = [ {file = "nbformat-5.1.3-py3-none-any.whl", hash = "sha256:eb8447edd7127d043361bc17f2f5a807626bc8e878c7709a1c647abda28a9171"}, @@ -1890,23 +2093,24 @@ nodeenv = [ {file = "nodeenv-1.6.0.tar.gz", hash = "sha256:3ef13ff90291ba2a4a7a4ff9a979b63ffdd00a464dbe04acf0ea6471517a4c2b"}, ] notebook = [ - {file = "notebook-6.4.0-py3-none-any.whl", hash = "sha256:f7f0a71a999c7967d9418272ae4c3378a220bd28330fbfb49860e46cf8a5838a"}, - {file = "notebook-6.4.0.tar.gz", hash = "sha256:9c4625e2a2aa49d6eae4ce20cbc3d8976db19267e32d2a304880e0c10bf8aef9"}, + {file = "notebook-6.4.6-py3-none-any.whl", hash = "sha256:5cad068fa82cd4fb98d341c052100ed50cd69fbfb4118cb9b8ab5a346ef27551"}, + {file = "notebook-6.4.6.tar.gz", hash = 
"sha256:7bcdf79bd1cda534735bd9830d2cbedab4ee34d8fe1df6e7b946b3aab0902ba3"}, ] packaging = [ - {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, - {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] pandocfilters = [ - {file = "pandocfilters-1.4.3.tar.gz", hash = "sha256:bc63fbb50534b4b1f8ebe1860889289e8af94a23bff7445259592df25a3906eb"}, + {file = "pandocfilters-1.5.0-py2.py3-none-any.whl", hash = "sha256:33aae3f25fd1a026079f5d27bdd52496f0e0803b3469282162bafdcbdf6ef14f"}, + {file = "pandocfilters-1.5.0.tar.gz", hash = "sha256:0b679503337d233b4339a817bfc8c50064e2eff681314376a47cb582305a7a38"}, ] parso = [ {file = "parso-0.8.2-py2.py3-none-any.whl", hash = "sha256:a8c4922db71e4fdb90e0d0bc6e50f9b273d3397925e5e60a717e719201778d22"}, {file = "parso-0.8.2.tar.gz", hash = "sha256:12b83492c6239ce32ff5eed6d3639d6a536170723c6f3f1506869f1ace413398"}, ] pathspec = [ - {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, - {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, + {file = "pathspec-0.9.0-py2.py3-none-any.whl", hash = "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a"}, + {file = "pathspec-0.9.0.tar.gz", hash = "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1"}, ] pexpect = [ {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, @@ -1916,99 +2120,128 @@ pickleshare = [ {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] +platformdirs = [ + {file = "platformdirs-2.4.0-py3-none-any.whl", hash = "sha256:8868bbe3c3c80d42f20156f22e7131d2fb321f5bc86a2a345375c6481a67021d"}, + {file = "platformdirs-2.4.0.tar.gz", hash = "sha256:367a5e80b3d04d2428ffa76d33f124cf11e8fff2acdaa9b43d545f5c7d661ef2"}, +] pluggy = [ - {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, - {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] pre-commit = [ - {file = "pre_commit-2.13.0-py2.py3-none-any.whl", hash = "sha256:b679d0fddd5b9d6d98783ae5f10fd0c4c59954f375b70a58cbe1ce9bcf9809a4"}, - {file = "pre_commit-2.13.0.tar.gz", hash = "sha256:764972c60693dc668ba8e86eb29654ec3144501310f7198742a767bec385a378"}, + {file = "pre_commit-2.15.0-py2.py3-none-any.whl", hash = "sha256:a4ed01000afcb484d9eb8d504272e642c4c4099bbad3a6b27e519bd6a3e928a6"}, + {file = "pre_commit-2.15.0.tar.gz", hash = "sha256:3c25add78dbdfb6a28a651780d5c311ac40dd17f160eb3954a0c59da40a505a7"}, ] prometheus-client = [ - {file = 
"prometheus_client-0.11.0-py2.py3-none-any.whl", hash = "sha256:b014bc76815eb1399da8ce5fc84b7717a3e63652b0c0f8804092c9363acab1b2"}, - {file = "prometheus_client-0.11.0.tar.gz", hash = "sha256:3a8baade6cb80bcfe43297e33e7623f3118d660d41387593758e2fb1ea173a86"}, + {file = "prometheus_client-0.12.0-py2.py3-none-any.whl", hash = "sha256:317453ebabff0a1b02df7f708efbab21e3489e7072b61cb6957230dd004a0af0"}, + {file = "prometheus_client-0.12.0.tar.gz", hash = "sha256:1b12ba48cee33b9b0b9de64a1047cbd3c5f2d0ab6ebcead7ddda613a750ec3c5"}, ] prompt-toolkit = [ - {file = "prompt_toolkit-3.0.18-py3-none-any.whl", hash = "sha256:bf00f22079f5fadc949f42ae8ff7f05702826a97059ffcc6281036ad40ac6f04"}, - {file = "prompt_toolkit-3.0.18.tar.gz", hash = "sha256:e1b4f11b9336a28fa11810bc623c357420f69dfdb6d2dac41ca2c21a55c033bc"}, + {file = "prompt_toolkit-3.0.22-py3-none-any.whl", hash = "sha256:48d85cdca8b6c4f16480c7ce03fd193666b62b0a21667ca56b4bb5ad679d1170"}, + {file = "prompt_toolkit-3.0.22.tar.gz", hash = "sha256:449f333dd120bd01f5d296a8ce1452114ba3a71fae7288d2f0ae2c918764fa72"}, ] ptyprocess = [ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, ] py = [ - {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, - {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] pycodestyle = [ - {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, - {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, + {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, + {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, ] pycparser = [ - {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"}, - {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"}, + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pyflakes = [ - {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, - {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, + {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, + {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, ] pygments = [ - {file = "Pygments-2.9.0-py3-none-any.whl", hash = "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e"}, - {file = 
"Pygments-2.9.0.tar.gz", hash = "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f"}, + {file = "Pygments-2.10.0-py3-none-any.whl", hash = "sha256:b8e67fe6af78f492b3c4b3e2970c0624cbf08beb1e493b2c99b9fa1b67a20380"}, + {file = "Pygments-2.10.0.tar.gz", hash = "sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6"}, ] pyhumps = [ {file = "pyhumps-3.0.2-py3-none-any.whl", hash = "sha256:367b1aadcaa64f8196a3cd14f56559a5602950aeb8486f49318e7394f5e18052"}, {file = "pyhumps-3.0.2.tar.gz", hash = "sha256:042b4b6eec6c1f862f8310c0eebbae19293e9edab8cafb030ff78c890ef1aa34"}, ] pylint = [ - {file = "pylint-2.8.3-py3-none-any.whl", hash = "sha256:792b38ff30903884e4a9eab814ee3523731abd3c463f3ba48d7b627e87013484"}, - {file = "pylint-2.8.3.tar.gz", hash = "sha256:0a049c5d47b629d9070c3932d13bff482b12119b6a241a93bc460b0be16953c8"}, + {file = "pylint-2.11.1-py3-none-any.whl", hash = "sha256:0f358e221c45cbd4dad2a1e4b883e75d28acdcccd29d40c76eb72b307269b126"}, + {file = "pylint-2.11.1.tar.gz", hash = "sha256:2c9843fff1a88ca0ad98a256806c82c5a8f86086e7ccbdb93297d86c3f90c436"}, ] pymdown-extensions = [ - {file = "pymdown-extensions-8.2.tar.gz", hash = "sha256:b6daa94aad9e1310f9c64c8b1f01e4ce82937ab7eb53bfc92876a97aca02a6f4"}, - {file = "pymdown_extensions-8.2-py3-none-any.whl", hash = "sha256:141452d8ed61165518f2c923454bf054866b85cf466feedb0eb68f04acdc2560"}, + {file = "pymdown-extensions-9.1.tar.gz", hash = "sha256:74247f2c80f1d9e3c7242abe1c16317da36c6f26c7ad4b8a7f457f0ec20f0365"}, + {file = "pymdown_extensions-9.1-py3-none-any.whl", hash = "sha256:b03e66f91f33af4a6e7a0e20c740313522995f69a03d86316b1449766c473d0e"}, ] pyparsing = [ - {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, - {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, + {file = "pyparsing-3.0.6-py3-none-any.whl", hash = "sha256:04ff808a5b90911829c55c4e26f75fa5ca8a2f5f36aa3a51f68e27033341d3e4"}, + {file = "pyparsing-3.0.6.tar.gz", hash = "sha256:d9bdec0013ef1eb5a84ab39a3b3868911598afa494f5faa038647101504e2b81"}, ] pyrsistent = [ - {file = "pyrsistent-0.17.3.tar.gz", hash = "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f4c8cabb46ff8e5d61f56a037974228e978f26bfefce4f61a4b1ac0ba7a2ab72"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:da6e5e818d18459fa46fac0a4a4e543507fe1110e808101277c5a2b5bab0cd2d"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5e4395bbf841693eaebaa5bb5c8f5cdbb1d139e07c975c682ec4e4f8126e03d2"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-win32.whl", hash = "sha256:527be2bfa8dc80f6f8ddd65242ba476a6c4fb4e3aedbf281dfbac1b1ed4165b1"}, + {file = "pyrsistent-0.18.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2aaf19dc8ce517a8653746d98e962ef480ff34b6bc563fc067be6401ffb457c7"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58a70d93fb79dc585b21f9d72487b929a6fe58da0754fa4cb9f279bb92369396"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:4916c10896721e472ee12c95cdc2891ce5890898d2f9907b1b4ae0f53588b710"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:73ff61b1411e3fb0ba144b8f08d6749749775fe89688093e1efef9839d2dcc35"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-win32.whl", hash = 
"sha256:b29b869cf58412ca5738d23691e96d8aff535e17390128a1a52717c9a109da4f"}, + {file = "pyrsistent-0.18.0-cp37-cp37m-win_amd64.whl", hash = "sha256:097b96f129dd36a8c9e33594e7ebb151b1515eb52cceb08474c10a5479e799f2"}, + {file = "pyrsistent-0.18.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:772e94c2c6864f2cd2ffbe58bb3bdefbe2a32afa0acb1a77e472aac831f83427"}, + {file = "pyrsistent-0.18.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c1a9ff320fa699337e05edcaae79ef8c2880b52720bc031b219e5b5008ebbdef"}, + {file = "pyrsistent-0.18.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cd3caef37a415fd0dae6148a1b6957a8c5f275a62cca02e18474608cb263640c"}, + {file = "pyrsistent-0.18.0-cp38-cp38-win32.whl", hash = "sha256:e79d94ca58fcafef6395f6352383fa1a76922268fa02caa2272fff501c2fdc78"}, + {file = "pyrsistent-0.18.0-cp38-cp38-win_amd64.whl", hash = "sha256:a0c772d791c38bbc77be659af29bb14c38ced151433592e326361610250c605b"}, + {file = "pyrsistent-0.18.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d5ec194c9c573aafaceebf05fc400656722793dac57f254cd4741f3c27ae57b4"}, + {file = "pyrsistent-0.18.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:6b5eed00e597b5b5773b4ca30bd48a5774ef1e96f2a45d105db5b4ebb4bca680"}, + {file = "pyrsistent-0.18.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:48578680353f41dca1ca3dc48629fb77dfc745128b56fc01096b2530c13fd426"}, + {file = "pyrsistent-0.18.0-cp39-cp39-win32.whl", hash = "sha256:f3ef98d7b76da5eb19c37fda834d50262ff9167c65658d1d8f974d2e4d90676b"}, + {file = "pyrsistent-0.18.0-cp39-cp39-win_amd64.whl", hash = "sha256:404e1f1d254d314d55adb8d87f4f465c8693d6f902f67eb6ef5b4526dc58e6ea"}, + {file = "pyrsistent-0.18.0.tar.gz", hash = "sha256:773c781216f8c2900b42a7b638d5b517bb134ae1acbebe4d1e8f1f41ea60eb4b"}, ] pytest = [ - {file = "pytest-6.2.4-py3-none-any.whl", hash = "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890"}, - {file = "pytest-6.2.4.tar.gz", hash = "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b"}, + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, +] +pytest-cov = [ + {file = "pytest-cov-2.12.1.tar.gz", hash = "sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7"}, + {file = "pytest_cov-2.12.1-py2.py3-none-any.whl", hash = "sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a"}, ] pytest-custom-exit-code = [ {file = "pytest-custom_exit_code-0.3.0.tar.gz", hash = "sha256:51ffff0ee2c1ddcc1242e2ddb2a5fd02482717e33a2326ef330e3aa430244635"}, {file = "pytest_custom_exit_code-0.3.0-py3-none-any.whl", hash = "sha256:6e0ce6e57ce3a583cb7e5023f7d1021e19dfec22be41d9ad345bae2fc61caf3b"}, ] python-dateutil = [ - {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, - {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] pytz = [ - {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, - {file = "pytz-2021.1.tar.gz", hash 
= "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, + {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, + {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, ] pywin32 = [ - {file = "pywin32-301-cp35-cp35m-win32.whl", hash = "sha256:93367c96e3a76dfe5003d8291ae16454ca7d84bb24d721e0b74a07610b7be4a7"}, - {file = "pywin32-301-cp35-cp35m-win_amd64.whl", hash = "sha256:9635df6998a70282bd36e7ac2a5cef9ead1627b0a63b17c731312c7a0daebb72"}, - {file = "pywin32-301-cp36-cp36m-win32.whl", hash = "sha256:c866f04a182a8cb9b7855de065113bbd2e40524f570db73ef1ee99ff0a5cc2f0"}, - {file = "pywin32-301-cp36-cp36m-win_amd64.whl", hash = "sha256:dafa18e95bf2a92f298fe9c582b0e205aca45c55f989937c52c454ce65b93c78"}, - {file = "pywin32-301-cp37-cp37m-win32.whl", hash = "sha256:98f62a3f60aa64894a290fb7494bfa0bfa0a199e9e052e1ac293b2ad3cd2818b"}, - {file = "pywin32-301-cp37-cp37m-win_amd64.whl", hash = "sha256:fb3b4933e0382ba49305cc6cd3fb18525df7fd96aa434de19ce0878133bf8e4a"}, - {file = "pywin32-301-cp38-cp38-win32.whl", hash = "sha256:88981dd3cfb07432625b180f49bf4e179fb8cbb5704cd512e38dd63636af7a17"}, - {file = "pywin32-301-cp38-cp38-win_amd64.whl", hash = "sha256:8c9d33968aa7fcddf44e47750e18f3d034c3e443a707688a008a2e52bbef7e96"}, - {file = "pywin32-301-cp39-cp39-win32.whl", hash = "sha256:595d397df65f1b2e0beaca63a883ae6d8b6df1cdea85c16ae85f6d2e648133fe"}, - {file = "pywin32-301-cp39-cp39-win_amd64.whl", hash = "sha256:87604a4087434cd814ad8973bd47d6524bd1fa9e971ce428e76b62a5e0860fdf"}, + {file = "pywin32-302-cp310-cp310-win32.whl", hash = "sha256:251b7a9367355ccd1a4cd69cd8dd24bd57b29ad83edb2957cfa30f7ed9941efa"}, + {file = "pywin32-302-cp310-cp310-win_amd64.whl", hash = "sha256:79cf7e6ddaaf1cd47a9e50cc74b5d770801a9db6594464137b1b86aa91edafcc"}, + {file = "pywin32-302-cp36-cp36m-win32.whl", hash = "sha256:fe21c2fb332d03dac29de070f191bdbf14095167f8f2165fdc57db59b1ecc006"}, + {file = "pywin32-302-cp36-cp36m-win_amd64.whl", hash = "sha256:d3761ab4e8c5c2dbc156e2c9ccf38dd51f936dc77e58deb940ffbc4b82a30528"}, + {file = "pywin32-302-cp37-cp37m-win32.whl", hash = "sha256:48dd4e348f1ee9538dd4440bf201ea8c110ea6d9f3a5010d79452e9fa80480d9"}, + {file = "pywin32-302-cp37-cp37m-win_amd64.whl", hash = "sha256:496df89f10c054c9285cc99f9d509e243f4e14ec8dfc6d78c9f0bf147a893ab1"}, + {file = "pywin32-302-cp38-cp38-win32.whl", hash = "sha256:e372e477d938a49266136bff78279ed14445e00718b6c75543334351bf535259"}, + {file = "pywin32-302-cp38-cp38-win_amd64.whl", hash = "sha256:543552e66936378bd2d673c5a0a3d9903dba0b0a87235ef0c584f058ceef5872"}, + {file = "pywin32-302-cp39-cp39-win32.whl", hash = "sha256:2393c1a40dc4497fd6161b76801b8acd727c5610167762b7c3e9fd058ef4a6ab"}, + {file = "pywin32-302-cp39-cp39-win_amd64.whl", hash = "sha256:af5aea18167a31efcacc9f98a2ca932c6b6a6d91ebe31f007509e293dea12580"}, ] pywinpty = [ - {file = "pywinpty-1.1.2-cp36-none-win_amd64.whl", hash = "sha256:7bb1b8380bc71bf04a983e803746b1ea7b8a91765723a82e108df81538b258c1"}, - {file = "pywinpty-1.1.2-cp37-none-win_amd64.whl", hash = "sha256:951f1b988c2407e9bd0c5c9b199f588673769abf0c8cb4724a01bc0666b97b0a"}, - {file = "pywinpty-1.1.2-cp38-none-win_amd64.whl", hash = "sha256:b3a38a0afb63b639ca4f78f67f4f8caa78ca470bd71b146480ef37d86cc99823"}, - {file = "pywinpty-1.1.2-cp39-none-win_amd64.whl", hash = "sha256:eac78a3ff69ce443ad9f67620bc60469f6354b18388570c63af6fc643beae498"}, - {file = "pywinpty-1.1.2.tar.gz", hash = 
"sha256:f1718838e1c7c700e5f0b79d5d5e05243ff583313ff88e47bb94318ba303e565"}, + {file = "pywinpty-1.1.6-cp310-none-win_amd64.whl", hash = "sha256:5f526f21b569b5610a61e3b6126259c76da979399598e5154498582df3736ade"}, + {file = "pywinpty-1.1.6-cp36-none-win_amd64.whl", hash = "sha256:7576e14f42b31fa98b62d24ded79754d2ea4625570c016b38eb347ce158a30f2"}, + {file = "pywinpty-1.1.6-cp37-none-win_amd64.whl", hash = "sha256:979ffdb9bdbe23db3f46fc7285fd6dbb86b80c12325a50582b211b3894072354"}, + {file = "pywinpty-1.1.6-cp38-none-win_amd64.whl", hash = "sha256:2308b1fc77545427610a705799d4ead5e7f00874af3fb148a03e202437456a7e"}, + {file = "pywinpty-1.1.6-cp39-none-win_amd64.whl", hash = "sha256:c703bf569a98ab7844b9daf37e88ab86f31862754ef6910a8b3824993a525c72"}, + {file = "pywinpty-1.1.6.tar.gz", hash = "sha256:8808f07350c709119cc4464144d6e749637f98e15acc1e5d3c37db1953d2eebc"}, ] pyyaml = [ {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, @@ -2046,116 +2279,143 @@ pyyaml-env-tag = [ {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, ] pyzmq = [ - {file = "pyzmq-22.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4e9b9a2f6944acdaf57316436c1acdcb30b8df76726bcf570ad9342bc5001654"}, - {file = "pyzmq-22.1.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24fb5bb641f0b2aa25fc3832f4b6fc62430f14a7d328229fe994b2bcdc07c93a"}, - {file = "pyzmq-22.1.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:c4674004ed64685a38bee222cd75afa769424ec603f9329f0dd4777138337f48"}, - {file = "pyzmq-22.1.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:461ed80d741692d9457ab820b1cc057ba9c37c394e67b647b639f623c8b321f6"}, - {file = "pyzmq-22.1.0-cp36-cp36m-win32.whl", hash = "sha256:de5806be66c9108e4dcdaced084e8ceae14100aa559e2d57b4f0cceb98c462de"}, - {file = "pyzmq-22.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a1c77796f395804d6002ff56a6a8168c1f98579896897ad7e35665a9b4a9eec5"}, - {file = "pyzmq-22.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c6a81c9e6754465d09a87e3acd74d9bb1f0039b2d785c6899622f0afdb41d760"}, - {file = "pyzmq-22.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0f0f27eaab9ba7b92d73d71c51d1a04464a1da6097a252d007922103253d2313"}, - {file = "pyzmq-22.1.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4b8fb1b3174b56fd020e4b10232b1764e52cf7f3babcfb460c5253bdc48adad0"}, - {file = "pyzmq-22.1.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:c8fff75af4c7af92dce9f81fa2a83ed009c3e1f33ee8b5222db2ef80b94e242e"}, - {file = "pyzmq-22.1.0-cp37-cp37m-win32.whl", hash = "sha256:cb9f9fe1305ef69b65794655fd89b2209b11bff3e837de981820a8aa051ef914"}, - {file = "pyzmq-22.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bf80b2cec42d96117248b99d3c86e263a00469c840a778e6cb52d916f4fdf82c"}, - {file = "pyzmq-22.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0ea7f4237991b0f745a4432c63e888450840bf8cb6c48b93fb7d62864f455529"}, - {file = "pyzmq-22.1.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:12ffcf33db6ba7c0e5aaf901e65517f5e2b719367b80bcbfad692f546a297c7a"}, - {file = "pyzmq-22.1.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d3ecfee2ee8d91ab2e08d2d8e89302c729b244e302bbc39c5b5dde42306ff003"}, - {file = "pyzmq-22.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:68e2c4505992ab5b89f976f89a9135742b18d60068f761bef994a6805f1cae0c"}, - {file = "pyzmq-22.1.0-cp38-cp38-win32.whl", hash = 
"sha256:285514956c08c7830da9d94e01f5414661a987831bd9f95e4d89cc8aaae8da10"}, - {file = "pyzmq-22.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:d5e5be93e1714a59a535bbbc086b9e4fd2448c7547c5288548f6fd86353cad9e"}, - {file = "pyzmq-22.1.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:b2f707b52e09098a7770503e39294ca6e22ae5138ffa1dd36248b6436d23d78e"}, - {file = "pyzmq-22.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:18dd2ca4540c476558099891c129e6f94109971d110b549db2a9775c817cedbd"}, - {file = "pyzmq-22.1.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:c6d0c32532a0519997e1ded767e184ebb8543bdb351f8eff8570bd461e874efc"}, - {file = "pyzmq-22.1.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:9ee48413a2d3cd867fd836737b4c89c24cea1150a37f4856d82d20293fa7519f"}, - {file = "pyzmq-22.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:4c4fe69c7dc0d13d4ae180ad650bb900854367f3349d3c16f0569f6c6447f698"}, - {file = "pyzmq-22.1.0-cp39-cp39-win32.whl", hash = "sha256:fc712a90401bcbf3fa25747f189d6dcfccbecc32712701cad25c6355589dac57"}, - {file = "pyzmq-22.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:68be16107f41563b9f67d93dff1c9f5587e0f76aa8fd91dc04c83d813bcdab1f"}, - {file = "pyzmq-22.1.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:734ea6565c71fc2d03d5b8c7d0d7519c96bb5567e0396da1b563c24a4ac66f0c"}, - {file = "pyzmq-22.1.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:1389b615917d4196962a9b469e947ba862a8ec6f5094a47da5e7a8d404bc07a4"}, - {file = "pyzmq-22.1.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:41049cff5265e9cd75606aa2c90a76b9c80b98d8fe70ee08cf4af3cedb113358"}, - {file = "pyzmq-22.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f49755684a963731479ff3035d45a8185545b4c9f662d368bd349c419839886d"}, - {file = "pyzmq-22.1.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:6355f81947e1fe6e7bb9e123aeb3067264391d3ebe8402709f824ef8673fa6f3"}, - {file = "pyzmq-22.1.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:089b974ec04d663b8685ac90e86bfe0e4da9d911ff3cf52cb765ff22408b102d"}, - {file = "pyzmq-22.1.0.tar.gz", hash = "sha256:7040d6dd85ea65703904d023d7f57fab793d7ffee9ba9e14f3b897f34ff2415d"}, + {file = "pyzmq-22.3.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:6b217b8f9dfb6628f74b94bdaf9f7408708cb02167d644edca33f38746ca12dd"}, + {file = "pyzmq-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2841997a0d85b998cbafecb4183caf51fd19c4357075dfd33eb7efea57e4c149"}, + {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f89468059ebc519a7acde1ee50b779019535db8dcf9b8c162ef669257fef7a93"}, + {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ea12133df25e3a6918718fbb9a510c6ee5d3fdd5a346320421aac3882f4feeea"}, + {file = "pyzmq-22.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76c532fd68b93998aab92356be280deec5de8f8fe59cd28763d2cc8a58747b7f"}, + {file = "pyzmq-22.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f907c7359ce8bf7f7e63c82f75ad0223384105f5126f313400b7e8004d9b33c3"}, + {file = "pyzmq-22.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:902319cfe23366595d3fa769b5b751e6ee6750a0a64c5d9f757d624b2ac3519e"}, + {file = "pyzmq-22.3.0-cp310-cp310-win32.whl", hash = "sha256:67db33bea0a29d03e6eeec55a8190e033318cee3cbc732ba8fd939617cbf762d"}, + {file = "pyzmq-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:7661fc1d5cb73481cf710a1418a4e1e301ed7d5d924f91c67ba84b2a1b89defd"}, + {file = 
"pyzmq-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:79244b9e97948eaf38695f4b8e6fc63b14b78cc37f403c6642ba555517ac1268"}, + {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab888624ed68930442a3f3b0b921ad7439c51ba122dbc8c386e6487a658e4a4e"}, + {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18cd854b423fce44951c3a4d3e686bac8f1243d954f579e120a1714096637cc0"}, + {file = "pyzmq-22.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:de8df0684398bd74ad160afdc2a118ca28384ac6f5e234eb0508858d8d2d9364"}, + {file = "pyzmq-22.3.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:62bcade20813796c426409a3e7423862d50ff0639f5a2a95be4b85b09a618666"}, + {file = "pyzmq-22.3.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ea5a79e808baef98c48c884effce05c31a0698c1057de8fc1c688891043c1ce1"}, + {file = "pyzmq-22.3.0-cp36-cp36m-win32.whl", hash = "sha256:3c1895c95be92600233e476fe283f042e71cf8f0b938aabf21b7aafa62a8dac9"}, + {file = "pyzmq-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:851977788b9caa8ed011f5f643d3ee8653af02c5fc723fa350db5125abf2be7b"}, + {file = "pyzmq-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b4ebed0977f92320f6686c96e9e8dd29eed199eb8d066936bac991afc37cbb70"}, + {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42abddebe2c6a35180ca549fadc7228d23c1e1f76167c5ebc8a936b5804ea2df"}, + {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1e41b32d6f7f9c26bc731a8b529ff592f31fc8b6ef2be9fa74abd05c8a342d7"}, + {file = "pyzmq-22.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:be4e0f229cf3a71f9ecd633566bd6f80d9fa6afaaff5489492be63fe459ef98c"}, + {file = "pyzmq-22.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:08c4e315a76ef26eb833511ebf3fa87d182152adf43dedee8d79f998a2162a0b"}, + {file = "pyzmq-22.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:badb868fff14cfd0e200eaa845887b1011146a7d26d579aaa7f966c203736b92"}, + {file = "pyzmq-22.3.0-cp37-cp37m-win32.whl", hash = "sha256:7c58f598d9fcc52772b89a92d72bf8829c12d09746a6d2c724c5b30076c1f11d"}, + {file = "pyzmq-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2b97502c16a5ec611cd52410bdfaab264997c627a46b0f98d3f666227fd1ea2d"}, + {file = "pyzmq-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d728b08448e5ac3e4d886b165385a262883c34b84a7fe1166277fe675e1c197a"}, + {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:480b9931bfb08bf8b094edd4836271d4d6b44150da051547d8c7113bf947a8b0"}, + {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7dc09198e4073e6015d9a8ea093fc348d4e59de49382476940c3dd9ae156fba8"}, + {file = "pyzmq-22.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ca6cd58f62a2751728016d40082008d3b3412a7f28ddfb4a2f0d3c130f69e74"}, + {file = "pyzmq-22.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:468bd59a588e276961a918a3060948ae68f6ff5a7fa10bb2f9160c18fe341067"}, + {file = "pyzmq-22.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c88fa7410e9fc471e0858638f403739ee869924dd8e4ae26748496466e27ac59"}, + {file = "pyzmq-22.3.0-cp38-cp38-win32.whl", hash = "sha256:c0f84360dcca3481e8674393bdf931f9f10470988f87311b19d23cda869bb6b7"}, + {file = "pyzmq-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f762442bab706fd874064ca218b33a1d8e40d4938e96c24dafd9b12e28017f45"}, + 
{file = "pyzmq-22.3.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:954e73c9cd4d6ae319f1c936ad159072b6d356a92dcbbabfd6e6204b9a79d356"}, + {file = "pyzmq-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f43b4a2e6218371dd4f41e547bd919ceeb6ebf4abf31a7a0669cd11cd91ea973"}, + {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:acebba1a23fb9d72b42471c3771b6f2f18dcd46df77482612054bd45c07dfa36"}, + {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cf98fd7a6c8aaa08dbc699ffae33fd71175696d78028281bc7b832b26f00ca57"}, + {file = "pyzmq-22.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d072f7dfbdb184f0786d63bda26e8a0882041b1e393fbe98940395f7fab4c5e2"}, + {file = "pyzmq-22.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:53f4fd13976789ffafedd4d46f954c7bb01146121812b72b4ddca286034df966"}, + {file = "pyzmq-22.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d1b5d457acbadcf8b27561deeaa386b0217f47626b29672fa7bd31deb6e91e1b"}, + {file = "pyzmq-22.3.0-cp39-cp39-win32.whl", hash = "sha256:e6a02cf7271ee94674a44f4e62aa061d2d049001c844657740e156596298b70b"}, + {file = "pyzmq-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:d3dcb5548ead4f1123851a5ced467791f6986d68c656bc63bfff1bf9e36671e2"}, + {file = "pyzmq-22.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3a4c9886d61d386b2b493377d980f502186cd71d501fffdba52bd2a0880cef4f"}, + {file = "pyzmq-22.3.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:80e043a89c6cadefd3a0712f8a1322038e819ebe9dbac7eca3bce1721bcb63bf"}, + {file = "pyzmq-22.3.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1621e7a2af72cced1f6ec8ca8ca91d0f76ac236ab2e8828ac8fe909512d566cb"}, + {file = "pyzmq-22.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:d6157793719de168b199194f6b6173f0ccd3bf3499e6870fac17086072e39115"}, + {file = "pyzmq-22.3.0.tar.gz", hash = "sha256:8eddc033e716f8c91c6a2112f0a8ebc5e00532b4a6ae1eb0ccc48e027f9c671c"}, ] regex = [ - {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"}, - {file = "regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"}, - {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"}, - {file = 
"regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"}, - {file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"}, - {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"}, - {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"}, - {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"}, - {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"}, - {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = 
"sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"}, - {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"}, - {file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"}, - {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, + {file = "regex-2021.11.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9345b6f7ee578bad8e475129ed40123d265464c4cfead6c261fd60fc9de00bcf"}, + {file = "regex-2021.11.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:416c5f1a188c91e3eb41e9c8787288e707f7d2ebe66e0a6563af280d9b68478f"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0538c43565ee6e703d3a7c3bdfe4037a5209250e8502c98f20fea6f5fdf2965"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ee1227cf08b6716c85504aebc49ac827eb88fcc6e51564f010f11a406c0a667"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6650f16365f1924d6014d2ea770bde8555b4a39dc9576abb95e3cd1ff0263b36"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30ab804ea73972049b7a2a5c62d97687d69b5a60a67adca07eb73a0ddbc9e29f"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:68a067c11463de2a37157930d8b153005085e42bcb7ad9ca562d77ba7d1404e0"}, + {file = "regex-2021.11.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:162abfd74e88001d20cb73ceaffbfe601469923e875caf9118333b1a4aaafdc4"}, + {file = "regex-2021.11.10-cp310-cp310-win32.whl", hash = "sha256:98ba568e8ae26beb726aeea2273053c717641933836568c2a0278a84987b2a1a"}, + {file = "regex-2021.11.10-cp310-cp310-win_amd64.whl", hash = "sha256:780b48456a0f0ba4d390e8b5f7c661fdd218934388cde1a974010a965e200e12"}, + {file = "regex-2021.11.10-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dba70f30fd81f8ce6d32ddeef37d91c8948e5d5a4c63242d16a2b2df8143aafc"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1f54b9b4b6c53369f40028d2dd07a8c374583417ee6ec0ea304e710a20f80a0"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fbb9dc00e39f3e6c0ef48edee202f9520dafb233e8b51b06b8428cfcb92abd30"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666abff54e474d28ff42756d94544cdfd42e2ee97065857413b72e8a2d6a6345"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5537f71b6d646f7f5f340562ec4c77b6e1c915f8baae822ea0b7e46c1f09b733"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ed2e07c6a26ed4bea91b897ee2b0835c21716d9a469a96c3e878dc5f8c55bb23"}, + {file = "regex-2021.11.10-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ca5f18a75e1256ce07494e245cdb146f5a9267d3c702ebf9b65c7f8bd843431e"}, + {file = "regex-2021.11.10-cp36-cp36m-win32.whl", hash = "sha256:93a5051fcf5fad72de73b96f07d30bc29665697fb8ecdfbc474f3452c78adcf4"}, + {file = "regex-2021.11.10-cp36-cp36m-win_amd64.whl", hash = "sha256:b483c9d00a565633c87abd0aaf27eb5016de23fed952e054ecc19ce32f6a9e7e"}, + {file = "regex-2021.11.10-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fff55f3ce50a3ff63ec8e2a8d3dd924f1941b250b0aac3d3d42b687eeff07a8e"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e32d2a2b02ccbef10145df9135751abea1f9f076e67a4e261b05f24b94219e36"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53db2c6be8a2710b359bfd3d3aa17ba38f8aa72a82309a12ae99d3c0c3dcd74d"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2207ae4f64ad3af399e2d30dde66f0b36ae5c3129b52885f1bffc2f05ec505c8"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5ca078bb666c4a9d1287a379fe617a6dccd18c3e8a7e6c7e1eb8974330c626a"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd33eb9bdcfbabab3459c9ee651d94c842bc8a05fabc95edf4ee0c15a072495e"}, + {file = "regex-2021.11.10-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05b7d6d7e64efe309972adab77fc2af8907bb93217ec60aa9fe12a0dad35874f"}, + {file = "regex-2021.11.10-cp37-cp37m-win32.whl", hash = "sha256:e71255ba42567d34a13c03968736c5d39bb4a97ce98188fafb27ce981115beec"}, + {file = "regex-2021.11.10-cp37-cp37m-win_amd64.whl", hash = "sha256:07856afef5ffcc052e7eccf3213317fbb94e4a5cd8177a2caa69c980657b3cb4"}, + {file = "regex-2021.11.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba05430e819e58544e840a68b03b28b6d328aff2e41579037e8bab7653b37d83"}, + {file = "regex-2021.11.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7f301b11b9d214f83ddaf689181051e7f48905568b0c7017c04c06dfd065e244"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aaa4e0705ef2b73dd8e36eeb4c868f80f8393f5f4d855e94025ce7ad8525f50"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:788aef3549f1924d5c38263104dae7395bf020a42776d5ec5ea2b0d3d85d6646"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8af619e3be812a2059b212064ea7a640aff0568d972cd1b9e920837469eb3cb"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85bfa6a5413be0ee6c5c4a663668a2cad2cbecdee367630d097d7823041bdeec"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f23222527b307970e383433daec128d769ff778d9b29343fb3496472dc20dabe"}, + {file = "regex-2021.11.10-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:da1a90c1ddb7531b1d5ff1e171b4ee61f6345119be7351104b67ff413843fe94"}, + {file = "regex-2021.11.10-cp38-cp38-win32.whl", hash = 
"sha256:0617383e2fe465732af4509e61648b77cbe3aee68b6ac8c0b6fe934db90be5cc"}, + {file = "regex-2021.11.10-cp38-cp38-win_amd64.whl", hash = "sha256:a3feefd5e95871872673b08636f96b61ebef62971eab044f5124fb4dea39919d"}, + {file = "regex-2021.11.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7f325be2804246a75a4f45c72d4ce80d2443ab815063cdf70ee8fb2ca59ee1b"}, + {file = "regex-2021.11.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:537ca6a3586931b16a85ac38c08cc48f10fc870a5b25e51794c74df843e9966d"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef2afb0fd1747f33f1ee3e209bce1ed582d1896b240ccc5e2697e3275f037c7"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:432bd15d40ed835a51617521d60d0125867f7b88acf653e4ed994a1f8e4995dc"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b43c2b8a330a490daaef5a47ab114935002b13b3f9dc5da56d5322ff218eeadb"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:962b9a917dd7ceacbe5cd424556914cb0d636001e393b43dc886ba31d2a1e449"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa8c626d6441e2d04b6ee703ef2d1e17608ad44c7cb75258c09dd42bacdfc64b"}, + {file = "regex-2021.11.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3c5fb32cc6077abad3bbf0323067636d93307c9fa93e072771cf9a64d1c0f3ef"}, + {file = "regex-2021.11.10-cp39-cp39-win32.whl", hash = "sha256:3b5df18db1fccd66de15aa59c41e4f853b5df7550723d26aa6cb7f40e5d9da5a"}, + {file = "regex-2021.11.10-cp39-cp39-win_amd64.whl", hash = "sha256:83ee89483672b11f8952b158640d0c0ff02dc43d9cb1b70c1564b49abe92ce29"}, + {file = "regex-2021.11.10.tar.gz", hash = "sha256:f341ee2df0999bfdf7a95e448075effe0db212a59387de1a70690e4acb03d4c6"}, ] requests = [ - {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, - {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, + {file = "requests-2.26.0-py2.py3-none-any.whl", hash = "sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24"}, + {file = "requests-2.26.0.tar.gz", hash = "sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"}, ] responses = [ - {file = "responses-0.13.3-py2.py3-none-any.whl", hash = "sha256:b54067596f331786f5ed094ff21e8d79e6a1c68ef625180a7d34808d6f36c11b"}, - {file = "responses-0.13.3.tar.gz", hash = "sha256:18a5b88eb24143adbf2b4100f328a2f5bfa72fbdacf12d97d41f07c26c45553d"}, + {file = "responses-0.16.0-py2.py3-none-any.whl", hash = "sha256:f358ef75e8bf431b0aa203cc62625c3a1c80a600dbe9de91b944bf4e9c600b92"}, + {file = "responses-0.16.0.tar.gz", hash = "sha256:a2e3aca2a8277e61257cd3b1c154b1dd0d782b1ae3d38b7fa37cbe3feb531791"}, ] s3transfer = [ - {file = "s3transfer-0.4.2-py2.py3-none-any.whl", hash = "sha256:9b3752887a2880690ce628bc263d6d13a3864083aeacff4890c1c9839a5eb0bc"}, - {file = "s3transfer-0.4.2.tar.gz", hash = "sha256:cb022f4b16551edebbb31a377d3f09600dbada7363d8c5db7976e7f47732e1b2"}, + {file = "s3transfer-0.5.0-py3-none-any.whl", hash = "sha256:9c1dc369814391a6bda20ebbf4b70a0f34630592c9aa520856bf384916af2803"}, + {file = "s3transfer-0.5.0.tar.gz", hash = "sha256:50ed823e1dc5868ad40c8dc92072f757aa0e653a192845c94a3b676f4a62da4c"}, ] 
send2trash = [ - {file = "Send2Trash-1.5.0-py3-none-any.whl", hash = "sha256:f1691922577b6fa12821234aeb57599d887c4900b9ca537948d2dac34aea888b"}, - {file = "Send2Trash-1.5.0.tar.gz", hash = "sha256:60001cc07d707fe247c94f74ca6ac0d3255aabcb930529690897ca2a39db28b2"}, + {file = "Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"}, + {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"}, +] +shellingham = [ + {file = "shellingham-1.4.0-py2.py3-none-any.whl", hash = "sha256:536b67a0697f2e4af32ab176c00a50ac2899c5a05e0d8e2dadac8e58888283f9"}, + {file = "shellingham-1.4.0.tar.gz", hash = "sha256:4855c2458d6904829bd34c299f11fdeed7cfefbf8a2c522e4caea6cd76b3171e"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] smmap = [ - {file = "smmap-4.0.0-py2.py3-none-any.whl", hash = "sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2"}, - {file = "smmap-4.0.0.tar.gz", hash = "sha256:7e65386bd122d45405ddf795637b7f7d2b532e7e401d46bbe3fb49b9986d5182"}, + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, ] structlog = [ - {file = "structlog-21.1.0-py2.py3-none-any.whl", hash = "sha256:62f06fc0ee32fb8580f0715eea66cb87271eb7efb0eaf9af6b639cba8981de47"}, - {file = "structlog-21.1.0.tar.gz", hash = "sha256:d9d2d890532e8db83c6977a2a676fb1889922ff0c26ad4dc0ecac26f9fafbc57"}, + {file = "structlog-21.3.0-py3-none-any.whl", hash = "sha256:063216becff8e6f6558122a9b00734f7e50bfef309eb730c85a52c74ed861a96"}, + {file = "structlog-21.3.0.tar.gz", hash = "sha256:4da2aec0aebf6dee7beb884eb0fda26ed9d6cce5338fcd523e8597d0f1826746"}, ] termcolor = [ {file = "termcolor-1.1.0.tar.gz", hash = "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"}, ] terminado = [ - {file = "terminado-0.10.1-py3-none-any.whl", hash = "sha256:c89ace5bffd0e7268bdcf22526830eb787fd146ff9d78691a0528386f92b9ae3"}, - {file = "terminado-0.10.1.tar.gz", hash = "sha256:89d5dac2f4e2b39758a0ff9a3b643707c95a020a6df36e70583b88297cd59cbe"}, + {file = "terminado-0.12.1-py3-none-any.whl", hash = "sha256:09fdde344324a1c9c6e610ee4ca165c4bb7f5bbf982fceeeb38998a988ef8452"}, + {file = "terminado-0.12.1.tar.gz", hash = "sha256:b20fd93cc57c1678c799799d117874367cc07a3d2d55be95205b1a88fa08393f"}, ] testpath = [ {file = "testpath-0.5.0-py3-none-any.whl", hash = "sha256:8044f9a0bab6567fc644a3593164e872543bb44225b0e24846e2c89237937589"}, @@ -2165,6 +2425,10 @@ toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] +tomli = [ + {file = "tomli-1.2.2-py3-none-any.whl", hash = "sha256:f04066f68f5554911363063a30b108d2b5a5b1a010aa8b6132af78489fe3aade"}, + {file = "tomli-1.2.2.tar.gz", hash = "sha256:c6ce0015eb38820eaf32b5db832dbc26deb3dd427bd5f6556cf0acac2c214fee"}, +] tornado = [ {file = "tornado-6.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32"}, {file = 
"tornado-6.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0d321a39c36e5f2c4ff12b4ed58d41390460f798422c4504e09eb5678e09998c"}, @@ -2209,43 +2473,53 @@ tornado = [ {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, ] tox = [ - {file = "tox-3.23.1-py2.py3-none-any.whl", hash = "sha256:b0b5818049a1c1997599d42012a637a33f24c62ab8187223fdd318fa8522637b"}, - {file = "tox-3.23.1.tar.gz", hash = "sha256:307a81ddb82bd463971a273f33e9533a24ed22185f27db8ce3386bff27d324e3"}, + {file = "tox-3.24.4-py2.py3-none-any.whl", hash = "sha256:5e274227a53dc9ef856767c21867377ba395992549f02ce55eb549f9fb9a8d10"}, + {file = "tox-3.24.4.tar.gz", hash = "sha256:c30b57fa2477f1fb7c36aa1d83292d5c2336cd0018119e1b1c17340e2c2708ca"}, ] tox-poetry = [ - {file = "tox-poetry-0.4.0.tar.gz", hash = "sha256:b926723cb1dea87902299dd8cad458d0b80ab7345bbf7983e6dab1cbf951090d"}, - {file = "tox_poetry-0.4.0-py2.py3-none-any.whl", hash = "sha256:b529f3b534b351b7cf9506caa66e2f230dded38b2b74245559c3d662685f4494"}, + {file = "tox-poetry-0.4.1.tar.gz", hash = "sha256:2395808e1ce487b5894c10f2202e14702bfa6d6909c0d1e525170d14809ac7ef"}, + {file = "tox_poetry-0.4.1-py2.py3-none-any.whl", hash = "sha256:11d9cd4e51d4cd9484b3ba63f2650ab4cfb4096e5f0682ecf561ddfc3c8e8c92"}, ] traitlets = [ - {file = "traitlets-5.0.5-py3-none-any.whl", hash = "sha256:69ff3f9d5351f31a7ad80443c2674b7099df13cc41fc5fa6e2f6d3b0330b0426"}, - {file = "traitlets-5.0.5.tar.gz", hash = "sha256:178f4ce988f69189f7e523337a3e11d91c786ded9360174a3d9ca83e79bc5396"}, + {file = "traitlets-5.1.1-py3-none-any.whl", hash = "sha256:2d313cc50a42cd6c277e7d7dc8d4d7fedd06a2c215f78766ae7b1a66277e0033"}, + {file = "traitlets-5.1.1.tar.gz", hash = "sha256:059f456c5a7c1c82b98c2e8c799f39c9b8128f6d0d46941ee118daace9eb70c7"}, +] +typing-extensions = [ + {file = "typing_extensions-4.0.0-py3-none-any.whl", hash = "sha256:829704698b22e13ec9eaf959122315eabb370b0884400e9818334d8b677023d9"}, + {file = "typing_extensions-4.0.0.tar.gz", hash = "sha256:2cdf80e4e04866a9b3689a51869016d36db0814d84b8d8a568d22781d45d27ed"}, ] urllib3 = [ - {file = "urllib3-1.26.5-py2.py3-none-any.whl", hash = "sha256:753a0374df26658f99d826cfe40394a686d05985786d946fbe4165b5148f5a7c"}, - {file = "urllib3-1.26.5.tar.gz", hash = "sha256:a7acd0977125325f516bda9735fa7142b909a8d01e8b2e4c8108d0984e6e0098"}, + {file = "urllib3-1.26.7-py2.py3-none-any.whl", hash = "sha256:c4fdf4019605b6e5423637e01bc9fe4daef873709a7973e195ceba0a62bbc844"}, + {file = "urllib3-1.26.7.tar.gz", hash = "sha256:4987c65554f7a2dbf30c18fd48778ef124af6fab771a377103da0585e2336ece"}, ] virtualenv = [ - {file = "virtualenv-20.4.7-py2.py3-none-any.whl", hash = "sha256:2b0126166ea7c9c3661f5b8e06773d28f83322de7a3ff7d06f0aed18c9de6a76"}, - {file = "virtualenv-20.4.7.tar.gz", hash = "sha256:14fdf849f80dbb29a4eb6caa9875d476ee2a5cf76a5f5415fa2f1606010ab467"}, + {file = "virtualenv-20.10.0-py2.py3-none-any.whl", hash = "sha256:4b02e52a624336eece99c96e3ab7111f469c24ba226a53ec474e8e787b365814"}, + {file = "virtualenv-20.10.0.tar.gz", hash = "sha256:576d05b46eace16a9c348085f7d0dc8ef28713a2cabaa1cf0aea41e8f12c9218"}, ] watchdog = [ - {file = "watchdog-2.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:581e3548159fe7d2a9f377a1fbcb41bdcee46849cca8ab803c7ac2e5e04ec77c"}, - {file = "watchdog-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:edcd9ef3fd460bb8a98eb1fcf99941e9fd9f275f45f1a82cb1359ec92975d647"}, - {file = "watchdog-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:d34ce2261f118ecd57eedeef95fc2a495fc4a40b3ed7b3bf0bd7a8ccc1ab4f8f"}, - {file = "watchdog-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:668391e6c32742d76e5be5db6bf95c455fa4b3d11e76a77c13b39bccb3a47a72"}, - {file = "watchdog-2.1.2-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6ef9fe57162c4c361692620e1d9167574ba1975ee468b24051ca11c9bba6438e"}, - {file = "watchdog-2.1.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:58ebb1095ee493008a7789d47dd62e4999505d82be89fc884d473086fccc6ebd"}, - {file = "watchdog-2.1.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:91387ee2421f30b75f7ff632c9d48f76648e56bf346a7c805c0a34187a93aab4"}, - {file = "watchdog-2.1.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:a6471517315a8541a943c00b45f1d252e36898a3ae963d2d52509b89a50cb2b9"}, - {file = "watchdog-2.1.2-py3-none-manylinux2014_i686.whl", hash = "sha256:a42e6d652f820b2b94cd03156c62559a2ea68d476476dfcd77d931e7f1012d4a"}, - {file = "watchdog-2.1.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:3d6405681471ebe0beb3aa083998c4870e48b57f8afdb45ea1b5957cc5cf1014"}, - {file = "watchdog-2.1.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:598d772beeaf9c98d0df946fbabf0c8365dd95ea46a250c224c725fe0c4730bc"}, - {file = "watchdog-2.1.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:4b219d46d89cfa49af1d73175487c14a318a74cb8c5442603fd13c6a5b418c86"}, - {file = "watchdog-2.1.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:188145185c08c73c56f1478ccf1f0f0f85101191439679b35b6b100886ce0b39"}, - {file = "watchdog-2.1.2-py3-none-win32.whl", hash = "sha256:255a32d44bbbe62e52874ff755e2eefe271b150e0ec240ad7718a62a7a7a73c4"}, - {file = "watchdog-2.1.2-py3-none-win_amd64.whl", hash = "sha256:1a62a4671796dc93d1a7262286217d9e75823c63d4c42782912d39a506d30046"}, - {file = "watchdog-2.1.2-py3-none-win_ia64.whl", hash = "sha256:104266a778906ae0e971368d368a65c4cd032a490a9fca5ba0b78c6c7ae11720"}, - {file = "watchdog-2.1.2.tar.gz", hash = "sha256:0237db4d9024859bea27d0efb59fe75eef290833fd988b8ead7a879b0308c2db"}, + {file = "watchdog-2.1.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9693f35162dc6208d10b10ddf0458cc09ad70c30ba689d9206e02cd836ce28a3"}, + {file = "watchdog-2.1.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aba5c812f8ee8a3ff3be51887ca2d55fb8e268439ed44110d3846e4229eb0e8b"}, + {file = "watchdog-2.1.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ae38bf8ba6f39d5b83f78661273216e7db5b00f08be7592062cb1fc8b8ba542"}, + {file = "watchdog-2.1.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ad6f1796e37db2223d2a3f302f586f74c72c630b48a9872c1e7ae8e92e0ab669"}, + {file = "watchdog-2.1.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:922a69fa533cb0c793b483becaaa0845f655151e7256ec73630a1b2e9ebcb660"}, + {file = "watchdog-2.1.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b2fcf9402fde2672545b139694284dc3b665fd1be660d73eca6805197ef776a3"}, + {file = "watchdog-2.1.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3386b367e950a11b0568062b70cc026c6f645428a698d33d39e013aaeda4cc04"}, + {file = "watchdog-2.1.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f1c00aa35f504197561060ca4c21d3cc079ba29cf6dd2fe61024c70160c990b"}, + {file = "watchdog-2.1.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b52b88021b9541a60531142b0a451baca08d28b74a723d0c99b13c8c8d48d604"}, + {file = "watchdog-2.1.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8047da932432aa32c515ec1447ea79ce578d0559362ca3605f8e9568f844e3c6"}, + {file = 
"watchdog-2.1.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e92c2d33858c8f560671b448205a268096e17870dcf60a9bb3ac7bfbafb7f5f9"}, + {file = "watchdog-2.1.6-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b7d336912853d7b77f9b2c24eeed6a5065d0a0cc0d3b6a5a45ad6d1d05fb8cd8"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_aarch64.whl", hash = "sha256:cca7741c0fcc765568350cb139e92b7f9f3c9a08c4f32591d18ab0a6ac9e71b6"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_armv7l.whl", hash = "sha256:25fb5240b195d17de949588628fdf93032ebf163524ef08933db0ea1f99bd685"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_i686.whl", hash = "sha256:be9be735f827820a06340dff2ddea1fb7234561fa5e6300a62fe7f54d40546a0"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0d19fb2441947b58fbf91336638c2b9f4cc98e05e1045404d7a4cb7cddc7a65"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:3becdb380d8916c873ad512f1701f8a92ce79ec6978ffde92919fd18d41da7fb"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_s390x.whl", hash = "sha256:ae67501c95606072aafa865b6ed47343ac6484472a2f95490ba151f6347acfc2"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_x86_64.whl", hash = "sha256:e0f30db709c939cabf64a6dc5babb276e6d823fd84464ab916f9b9ba5623ca15"}, + {file = "watchdog-2.1.6-py3-none-win32.whl", hash = "sha256:e02794ac791662a5eafc6ffeaf9bcc149035a0e48eb0a9d40a8feb4622605a3d"}, + {file = "watchdog-2.1.6-py3-none-win_amd64.whl", hash = "sha256:bd9ba4f332cf57b2c1f698be0728c020399ef3040577cde2939f2e045b39c1e5"}, + {file = "watchdog-2.1.6-py3-none-win_ia64.whl", hash = "sha256:a0f1c7edf116a12f7245be06120b1852275f9506a7d90227648b250755a03923"}, + {file = "watchdog-2.1.6.tar.gz", hash = "sha256:a36e75df6c767cbf46f61a91c70b3ba71811dfa0aca4a324d9407a06a8b7a2e7"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, @@ -2256,17 +2530,67 @@ webencodings = [ {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] werkzeug = [ - {file = "Werkzeug-2.0.1-py3-none-any.whl", hash = "sha256:6c1ec500dcdba0baa27600f6a22f6333d8b662d22027ff9f6202e3367413caa8"}, - {file = "Werkzeug-2.0.1.tar.gz", hash = "sha256:1de1db30d010ff1af14a009224ec49ab2329ad2cde454c8a708130642d579c42"}, + {file = "Werkzeug-2.0.2-py3-none-any.whl", hash = "sha256:63d3dc1cf60e7b7e35e97fa9861f7397283b75d765afcaefd993d6046899de8f"}, + {file = "Werkzeug-2.0.2.tar.gz", hash = "sha256:aa2bb6fc8dee8d6c504c0ac1e7f5f7dc5810a9903e793b6f715a9f015bdadb9a"}, ] wrapt = [ - {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, + {file = "wrapt-1.13.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e05e60ff3b2b0342153be4d1b597bbcfd8330890056b9619f4ad6b8d5c96a81a"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:85148f4225287b6a0665eef08a178c15097366d46b210574a658c1ff5b377489"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:2dded5496e8f1592ec27079b28b6ad2a1ef0b9296d270f77b8e4a3a796cf6909"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e94b7d9deaa4cc7bac9198a58a7240aaf87fe56c6277ee25fa5b3aa1edebd229"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:498e6217523111d07cd67e87a791f5e9ee769f9241fcf8a379696e25806965af"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_i686.whl", hash = 
"sha256:ec7e20258ecc5174029a0f391e1b948bf2906cd64c198a9b8b281b811cbc04de"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:87883690cae293541e08ba2da22cacaae0a092e0ed56bbba8d018cc486fbafbb"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:f99c0489258086308aad4ae57da9e8ecf9e1f3f30fa35d5e170b4d4896554d80"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6a03d9917aee887690aa3f1747ce634e610f6db6f6b332b35c2dd89412912bca"}, + {file = "wrapt-1.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:936503cb0a6ed28dbfa87e8fcd0a56458822144e9d11a49ccee6d9a8adb2ac44"}, + {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f9c51d9af9abb899bd34ace878fbec8bf357b3194a10c4e8e0a25512826ef056"}, + {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:220a869982ea9023e163ba915077816ca439489de6d2c09089b219f4e11b6785"}, + {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0877fe981fd76b183711d767500e6b3111378ed2043c145e21816ee589d91096"}, + {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:43e69ffe47e3609a6aec0fe723001c60c65305784d964f5007d5b4fb1bc6bf33"}, + {file = "wrapt-1.13.3-cp310-cp310-win32.whl", hash = "sha256:78dea98c81915bbf510eb6a3c9c24915e4660302937b9ae05a0947164248020f"}, + {file = "wrapt-1.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:ea3e746e29d4000cd98d572f3ee2a6050a4f784bb536f4ac1f035987fc1ed83e"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:8c73c1a2ec7c98d7eaded149f6d225a692caa1bd7b2401a14125446e9e90410d"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:086218a72ec7d986a3eddb7707c8c4526d677c7b35e355875a0fe2918b059179"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:e92d0d4fa68ea0c02d39f1e2f9cb5bc4b4a71e8c442207433d8db47ee79d7aa3"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:d4a5f6146cfa5c7ba0134249665acd322a70d1ea61732723c7d3e8cc0fa80755"}, + {file = "wrapt-1.13.3-cp35-cp35m-win32.whl", hash = "sha256:8aab36778fa9bba1a8f06a4919556f9f8c7b33102bd71b3ab307bb3fecb21851"}, + {file = "wrapt-1.13.3-cp35-cp35m-win_amd64.whl", hash = "sha256:944b180f61f5e36c0634d3202ba8509b986b5fbaf57db3e94df11abee244ba13"}, + {file = "wrapt-1.13.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2ebdde19cd3c8cdf8df3fc165bc7827334bc4e353465048b36f7deeae8ee0918"}, + {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:610f5f83dd1e0ad40254c306f4764fcdc846641f120c3cf424ff57a19d5f7ade"}, + {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5601f44a0f38fed36cc07db004f0eedeaadbdcec90e4e90509480e7e6060a5bc"}, + {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e6906d6f48437dfd80464f7d7af1740eadc572b9f7a4301e7dd3d65db285cacf"}, + {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:766b32c762e07e26f50d8a3468e3b4228b3736c805018e4b0ec8cc01ecd88125"}, + {file = "wrapt-1.13.3-cp36-cp36m-win32.whl", hash = "sha256:5f223101f21cfd41deec8ce3889dc59f88a59b409db028c469c9b20cfeefbe36"}, + {file = "wrapt-1.13.3-cp36-cp36m-win_amd64.whl", hash = "sha256:f122ccd12fdc69628786d0c947bdd9cb2733be8f800d88b5a37c57f1f1d73c10"}, + 
{file = "wrapt-1.13.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:46f7f3af321a573fc0c3586612db4decb7eb37172af1bc6173d81f5b66c2e068"}, + {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:778fd096ee96890c10ce96187c76b3e99b2da44e08c9e24d5652f356873f6709"}, + {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0cb23d36ed03bf46b894cfec777eec754146d68429c30431c99ef28482b5c1df"}, + {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:96b81ae75591a795d8c90edc0bfaab44d3d41ffc1aae4d994c5aa21d9b8e19a2"}, + {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7dd215e4e8514004c8d810a73e342c536547038fb130205ec4bba9f5de35d45b"}, + {file = "wrapt-1.13.3-cp37-cp37m-win32.whl", hash = "sha256:47f0a183743e7f71f29e4e21574ad3fa95676136f45b91afcf83f6a050914829"}, + {file = "wrapt-1.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fd76c47f20984b43d93de9a82011bb6e5f8325df6c9ed4d8310029a55fa361ea"}, + {file = "wrapt-1.13.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b73d4b78807bd299b38e4598b8e7bd34ed55d480160d2e7fdaabd9931afa65f9"}, + {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ec9465dd69d5657b5d2fa6133b3e1e989ae27d29471a672416fd729b429eb554"}, + {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd91006848eb55af2159375134d724032a2d1d13bcc6f81cd8d3ed9f2b8e846c"}, + {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ae9de71eb60940e58207f8e71fe113c639da42adb02fb2bcbcaccc1ccecd092b"}, + {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:51799ca950cfee9396a87f4a1240622ac38973b6df5ef7a41e7f0b98797099ce"}, + {file = "wrapt-1.13.3-cp38-cp38-win32.whl", hash = "sha256:4b9c458732450ec42578b5642ac53e312092acf8c0bfce140ada5ca1ac556f79"}, + {file = "wrapt-1.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:7dde79d007cd6dfa65afe404766057c2409316135cb892be4b1c768e3f3a11cb"}, + {file = "wrapt-1.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:981da26722bebb9247a0601e2922cedf8bb7a600e89c852d063313102de6f2cb"}, + {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:705e2af1f7be4707e49ced9153f8d72131090e52be9278b5dbb1498c749a1e32"}, + {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25b1b1d5df495d82be1c9d2fad408f7ce5ca8a38085e2da41bb63c914baadff7"}, + {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:77416e6b17926d953b5c666a3cb718d5945df63ecf922af0ee576206d7033b5e"}, + {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:865c0b50003616f05858b22174c40ffc27a38e67359fa1495605f96125f76640"}, + {file = "wrapt-1.13.3-cp39-cp39-win32.whl", hash = "sha256:0a017a667d1f7411816e4bf214646d0ad5b1da2c1ea13dec6c162736ff25a374"}, + {file = "wrapt-1.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:81bd7c90d28a4b2e1df135bfbd7c23aee3050078ca6441bead44c42483f9ebfb"}, + {file = "wrapt-1.13.3.tar.gz", hash = "sha256:1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185"}, ] xmltodict = [ {file = "xmltodict-0.12.0-py2.py3-none-any.whl", hash = "sha256:8bbcb45cc982f48b2ca8fe7e7827c5d792f217ecf1792626f808bf41c3b86051"}, {file = 
"xmltodict-0.12.0.tar.gz", hash = "sha256:50d8c638ed7ecb88d90561beedbf720c9b4e851a9fa6c47ebd64e99d166d8a21"}, ] zipp = [ - {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, - {file = "zipp-3.4.1.tar.gz", hash = "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"}, + {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, + {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, ] diff --git a/pyproject.toml b/pyproject.toml index 9cd01a8..773f2de 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "aws_data_tools" -version = "0.1.0-beta2" +version = "0.1.1" description = "A set of Python libraries for querying and transforming data from AWS APIs" authors = ["Tim O'Guin "] license = "MIT" @@ -18,7 +18,7 @@ classifiers = [ "Programming Language :: Python :: 3.9", "Topic :: Software Development :: Libraries :: Python Modules" ] -exclude = ["tests"] +exclude = ["conftest.py", "fixtures", "tests"] include = ["CHANGELOG.md"] [tool.poetry.urls] @@ -30,41 +30,45 @@ Tracker = "https://github.com/timoguin/aws-data-tools-py/issues" [tool.poetry.dependencies] python = ">=3.9,<4" boto3 = "^1.17.80" +dacite = "^1.6.0" +graphviz = "^0.16" pyhumps = "^3.0.2" PyYAML = "^5.4.1" structlog = "^21.1.0" # Optional extras: cli -click = {version = "^8.0.1", optional = true, extras = ["cli"]} +click = {version = "^8.0.1", optional = true, extras=["cli"]} +click-completion = {version = "^0.5.2", optional = true, extras=["cli"]} # Optional extras: devtools -ipdb = {version = "^0.13.8", optional = true, extras=["devtools"]} -notebook = {version = "^6.4.0", optional = true, extras = ["devtools"]} -pre-commit = {version = "^2.13.0", optional = true, extras = ["devtools"]} +ipdb = {version = "^0.13.8", optional = true} +notebook = {version = "^6.4.0", optional = true} +pre-commit = {version = "^2.13.0", optional = true} # Optional extras: docs -blacken-docs = {version = "^1.10.0", optional = true, extras = ["docs"]} -mkdocs = {version = "^1.1.2", optional = true, extras = ["docs"]} -mkdocs-git-revision-date-localized-plugin = {version = "^0.9.2", optional = true, extras = ["docs"]} -mkdocs-macros-plugin = {version = "^0.5.5", optional = true, extras = ["docs"]} -mkdocs-material = {version = "^7.1.5", optional = true, extras = ["docs"]} -graphviz = "^0.16" +blacken-docs = {version = "^1.12.0", optional = true} +mkdocs = {version = "^1.1.2", optional = true} +mkdocs-git-revision-date-localized-plugin = {version = "^0.9.2", optional = true} +mkdocs-macros-plugin = {version = "^0.5.5", optional = true} +mkdocs-material = {version = "^7.1.5", optional = true} [tool.poetry.dev-dependencies] -black = "^21.5b1" +black = "^21.11b1" +coverage = "^5.5" flakehell = "^0.9.0" isort = "^5.8.0" -pylint = "^2.8.3" -pytest = "^6.2.4" -tox-poetry = "^0.4.0" +mccabe = "^0.6.1" moto = "^2.0.9" pycodestyle = "^2.7.0" pyflakes = "^2.3.1" -mccabe = "^0.6.1" +pylint = "^2.8.3" +pytest = "^6.2.4" +pytest-cov = "^2.12.1" pytest-custom-exit-code = "^0.3.0" +tox-poetry = "^0.4.0" [tool.poetry.extras] -cli = ["click"] +cli = ["click", "click-completion"] devtools = [ "ipdb", "notebook", @@ -110,3 +114,25 @@ mccabe = ["+*"] pycodestyle = ["+*"] pyflakes = ["+*"] pylint = ["+*"] + +[tool.coverage.run] +source = ["aws_data_tools"] +omit = [ + "*test*", + "**/__init__.py", + "client/__init__.py", + 
"models/__init__.py", + "utils/__init__.py" +] + +[tool.coverage.report] +omit = [ + "*test*", + "__init__.py", + "client/__init__.py", + "models/__init__.py", + "utils/__init__.py" +] + +[tool.pytest.ini_options] +addopts = "-ra -q" diff --git a/test.py b/test.py new file mode 100644 index 0000000..1c5d710 --- /dev/null +++ b/test.py @@ -0,0 +1,85 @@ +from moto import mock_organizations + +from aws_data_tools.models import ( + Account, + OrganizationalUnit, + OrganizationDataBuilder, +) +from aws_data_tools import conftest + +ou_paths = conftest.ou_paths() +account_paths = conftest.account_paths() + +mock = mock_organizations() +mock.start() + +odb = OrganizationDataBuilder() +odb.api("create_organization", feature_set="ALL") +odb.fetch_organization() +odb.fetch_root() + + +def create_ous(ou_paths: dict[str, dict[str, str]]): + created = {} + maxdepth = 5 + for i in range(1, maxdepth): + tree = {k: v for k, v in ou_paths.items() if v["depth"] == i} + for k, v in tree.items(): + ou_name = v["name"] + parent_id = None + parent_path = v["parent_path"] + if parent_path == "/": + parent_id = odb.dm.root.id + else: + parent_id = created_ous[parent_path].id + data = odb.api( + "create_organizational_unit", name=ou_name, parent_id=parent_id + ).get("organizational_unit") + created[k] = OrganizationalUnit(**data) + return created + + +created_ous = create_ous(ou_paths) +parent_map = {"/": odb.dm.root.id} +for ou_path, ou in created_ous.items(): + parent_map[ou_path] = ou.id + + +def create_accounts(account_paths: list[dict[str, str]], parent_map: dict[str, str]): + created = {} + for account in account_paths: + account_name = account["name"] + email = account["name"] + "@example.com" + parent_id = parent_map[account["parent_path"]] + create_account_status = odb.api( + "create_account", + account_name=account_name, + email=email, + ).get("create_account_status") + account_id = create_account_status["account_id"] + move_account = odb.api( + "move_account", + account_id=account_id, + destination_parent_id=parent_id, + source_parent_id=odb.dm.root.id, + ) + if move_account["response_metadata"]["http_status_code"] != 200: + raise Exception(f"Error creating account {account_name}") + data = odb.api("describe_account", account_id=account_id).get("account") + created[account["path"]] = Account(**data) + return created + + +created_accounts = create_accounts(account_paths, parent_map) + +odb.fetch_ous() +odb.fetch_policies() +odb.fetch_accounts() +odb.fetch_policy_targets() +odb.fetch_effective_policies() +odb.fetch_all_tags() + +with open("test-organization.yaml", "wb") as f: + f.write(odb.to_json().encode()) + +mock.stop() diff --git a/tests/test.py.bak b/tests/test.py.bak deleted file mode 100644 index cd13eea..0000000 --- a/tests/test.py.bak +++ /dev/null @@ -1,30 +0,0 @@ -import os - -import pytest -from moto import mock_organizations - -# from aws_data_tools.client import APIClient -from aws_data_tools.builders.organizations import OrganizationDataBuilder - - -@pytest.fixture(scope="function") -def aws_credentials(): - """Mocked AWS Credentials for moto""" - os.environ["AWS_ACCESS_KEY_ID"] = "testing" - os.environ["AWS_SECRET_ACCESS_KEY"] = "testing" - os.environ["AWS_SECURITY_TOKEN"] = "testing" - os.environ["AWS_SESSION_TOKEN"] = "testing" - - -@pytest.fixture -@mock_organizations -def organization_data_builder(): - """Initialize a connected builder for Organizations""" - return OrganizationDataBuilder(init_connect=True) - - -@mock_organizations -def test_fetch_organization(aws_credentials, 
-    """Test initial querying of Organization data with the builder"""
-    organization_data_builder.fetch_organization()
-    data = organization_data_builder.as_json()
diff --git a/tests/test_builders_organization.py b/tests/test_builders_organization.py
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/test_client.py b/tests/test_client.py
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/test_models_base.py b/tests/test_models_base.py
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/test_models_organizations.py b/tests/test_models_organizations.py
deleted file mode 100644
index e69de29..0000000
diff --git a/tests/test_utils.py b/tests/test_utils.py
deleted file mode 100644
index e69de29..0000000
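The empty placeholder tests under tests/ are deleted above, and the moto-based pattern sketched in the removed tests/test.py.bak can still be applied against the builder's new import path. A minimal sketch under that assumption: it reuses only calls that appear in the diff above (OrganizationDataBuilder(), api("create_organization", feature_set="ALL"), fetch_organization(), to_json()); the pytest fixture wiring and the final assertion are illustrative.

```python
import os

import pytest
from moto import mock_organizations

# The builder is imported from the models namespace, as in test.py above
from aws_data_tools.models import OrganizationDataBuilder


@pytest.fixture(scope="function")
def aws_credentials():
    """Mocked AWS credentials so moto never touches a real account"""
    os.environ["AWS_ACCESS_KEY_ID"] = "testing"
    os.environ["AWS_SECRET_ACCESS_KEY"] = "testing"
    os.environ["AWS_SECURITY_TOKEN"] = "testing"
    os.environ["AWS_SESSION_TOKEN"] = "testing"


@mock_organizations
def test_fetch_organization(aws_credentials):
    """Create a mock Organization, fetch it, and serialize it to JSON"""
    odb = OrganizationDataBuilder()
    odb.api("create_organization", feature_set="ALL")
    odb.fetch_organization()
    assert odb.to_json()
```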