From 0b90125deebd5f9e400002591356a188763369d2 Mon Sep 17 00:00:00 2001 From: Vincent Claes Date: Tue, 1 Jun 2021 08:57:13 +0200 Subject: [PATCH 1/6] add setup.py file --- setup.py | 44 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 setup.py diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..33d34fb --- /dev/null +++ b/setup.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +from setuptools import setup + +packages = \ +['datajob', 'datajob.glue', 'datajob.package', 'datajob.stepfunctions'] + +package_data = \ +{'': ['*']} + +install_requires = \ +['aws-cdk.aws-glue>=1.87.1,<2.0.0', + 'aws-cdk.aws-s3-deployment>=1.87.1,<2.0.0', + 'aws-cdk.cloudformation-include>=1.87.1,<2.0.0', + 'aws-cdk.core>=1.87.1,<2.0.0', + 'aws-empty-bucket>=2.4.0,<3.0.0', + 'contextvars>=2.4,<3.0', + 'dephell>=0.8.3,<0.9.0', + 'rich>=9.13.0,<10.0.0', + 'stepfunctions>=1.1.2,<2.0.0', + 'typer>=0.3.2,<0.4.0'] + +entry_points = \ +{'console_scripts': ['datajob = datajob.datajob:run']} + +setup_kwargs = { + 'name': 'datajob', + 'version': '0.7.0', + 'description': 'Build and deploy a serverless data pipeline with no effort on AWS.', + 'long_description': '![logo](./assets/logo.png)\n\n
\n Build and deploy a serverless data pipeline on AWS with no effort.
\n Our goal is to let developers think about the business logic, datajob does the rest...\n
\n
\n
\n
\n\n\n- We support creating and deploying code to python shell / pyspark Glue jobs.\n- Orchestrate the glue jobs using stepfunctions as simple as `task1 >> [task2,task3] >> task4`\n- Let us [know](https://github.com/vincentclaes/datajob/discussions) what you want to see next.\n\n> Dependencies are [AWS CDK](https://github.com/aws/aws-cdk) and [Step Functions SDK for data science](https://github.com/aws/aws-step-functions-data-science-sdk-python)
\n\n# Installation\n\n Datajob can be installed using pip.
\n Beware that we depend on [aws cdk cli](https://github.com/aws/aws-cdk)!\n\n pip install datajob\n npm install -g aws-cdk@1.98.0 # latest version of datajob depends this version\n\n# Quickstart\n\nWe have a simple data pipeline composed of [2 glue jobs](./examples/data_pipeline_with_packaged_project/glue_jobs/) orchestrated sequentially using step functions.\n\n```python\nimport pathlib\nfrom aws_cdk import core\n\nfrom datajob.datajob_stack import DataJobStack\nfrom datajob.glue.glue_job import GlueJob\nfrom datajob.stepfunctions.stepfunctions_workflow import StepfunctionsWorkflow\n\n\ncurrent_dir = pathlib.Path(__file__).parent.absolute()\n\napp = core.App()\n\n\nwith DataJobStack(scope=app, id="data-pipeline-pkg", project_root=current_dir) as datajob_stack:\n\n task1 = GlueJob(\n datajob_stack=datajob_stack, name="task1", job_path="glue_jobs/task1.py"\n )\n\n task2 = GlueJob(\n datajob_stack=datajob_stack, name="task2", job_path="glue_jobs/task2.py"\n )\n\n with StepfunctionsWorkflow(datajob_stack=datajob_stack, name="workflow") as step_functions_workflow:\n task1 >> task2\n\napp.synth()\n\n```\n\nWe add the above code in a file called `datajob_stack.py` in the [root of the project](./examples/data_pipeline_with_packaged_project/).\n\n\n### Configure CDK\nFollow the steps [here](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html#cli-configure-quickstart-config) to configure your credentials.\n\n```shell script\nexport AWS_PROFILE=default\n# use the aws cli to get your account number\nexport AWS_ACCOUNT=$(aws sts get-caller-identity --query Account --output text --profile $AWS_PROFILE)\nexport AWS_DEFAULT_REGION=us-east-2\n\ncdk bootstrap aws://$AWS_ACCOUNT/$AWS_DEFAULT_REGION\n```\n\n### Deploy\n\n```shell\nexport STAGE=$AWS_ACCOUNT\ncd examples/data_pipeline_with_packaged_project\ndatajob deploy --config datajob_stack.py --stage $STAGE --package setuppy\n```\nDatajob will create s3 buckets based on the `stage` variable.\nThe stage 
variable will typically be something like "dev", "stg", "prd", ...\nbut since S3 buckets need to be globally unique, for this example we will use our `$AWS_ACCOUNT` for the `--stage` parameter.\n\n
\nuse cdk cli\n\n```shell script\ncd examples/data_pipeline_with_packaged_project\npython setup.py bdist_wheel\ncdk deploy --app "python datajob_stack.py" -c stage=$STAGE\n```\n
\n\n### Run\n\n```shell script\ndatajob execute --state-machine data-pipeline-pkg-$STAGE-workflow\n```\nThe step function state machine name is constructed as `--`.\nThe terminal will show a link to the step functions page to follow up on your pipeline run.\n\n### Destroy\n\n```shell script\ndatajob destroy --config datajob_stack.py --stage $STAGE\n```\n\n
\nuse cdk cli\n\n```shell script\ncdk destroy --app "python datajob_stack.py" -c stage=$STAGE\n```\n
\n\n> Note: you can use any cdk arguments in the datajob cli\n\n# Functionality\n\n
\nUsing datajob\'s S3 data bucket\n\nDynamically reference the `datajob_stack` data bucket name to the arguments of your GlueJob by calling\n`datajob_stack.context.data_bucket_name`.\n\n```python\nimport pathlib\n\nfrom aws_cdk import core\nfrom datajob.datajob_stack import DataJobStack\nfrom datajob.glue.glue_job import GlueJob\nfrom datajob.stepfunctions.stepfunctions_workflow import StepfunctionsWorkflow\n\ncurrent_dir = str(pathlib.Path(__file__).parent.absolute())\n\napp = core.App()\n\nwith DataJobStack(\n scope=app, id="datajob-python-pyspark", project_root=current_dir\n) as datajob_stack:\n\n pyspark_job = GlueJob(\n datajob_stack=datajob_stack,\n name="pyspark-job",\n job_path="glue_job/glue_pyspark_example.py",\n job_type="glueetl",\n glue_version="2.0", # we only support glue 2.0\n python_version="3",\n worker_type="Standard", # options are Standard / G.1X / G.2X\n number_of_workers=1,\n arguments={\n "--source": f"s3://{datajob_stack.context.data_bucket_name}/raw/iris_dataset.csv",\n "--destination": f"s3://{datajob_stack.context.data_bucket_name}/target/pyspark_job/iris_dataset.parquet",\n },\n )\n\n with StepfunctionsWorkflow(datajob_stack=datajob_stack, name="workflow") as sfn:\n pyspark_job >> ...\n\n```\n\ndeploy to stage `my-stage`:\n\n```shell\ndatajob deploy --config datajob_stack.py --stage my-stage --package setuppy\n```\n\n`datajob_stack.context.data_bucket_name` will evaluate to `datajob-python-pyspark-my-stage`\n\nyou can find this example [here](./examples/data_pipeline_pyspark/glue_job/glue_pyspark_example.py)\n\n
\n\n
\nDeploy files to deployment bucket\n\nSpecify the path to the folder we would like to include in the deployment bucket.\n\n```python\n\nfrom aws_cdk import core\nfrom datajob.datajob_stack import DataJobStack\n\napp = core.App()\n\nwith DataJobStack(\n scope=app, id="some-stack-name", include_folder="path/to/folder/"\n) as datajob_stack:\n\n ...\n\n```\n\n
\n\n
\nPackage project\n\nPackage you project using [poetry](https://python-poetry.org/)\n\n```shell\ndatajob deploy --config datajob_stack.py --package poetry\n```\nPackage you project using [setup.py](./examples/data_pipeline_with_packaged_project)\n```shell\ndatajob deploy --config datajob_stack.py --package setuppy\n```\n
\n\n
\nUsing Pyspark\n\n```python\nimport pathlib\n\nfrom aws_cdk import core\nfrom datajob.datajob_stack import DataJobStack\nfrom datajob.glue.glue_job import GlueJob\nfrom datajob.stepfunctions.stepfunctions_workflow import StepfunctionsWorkflow\n\ncurrent_dir = str(pathlib.Path(__file__).parent.absolute())\n\napp = core.App()\n\nwith DataJobStack(\n scope=app, id="datajob-python-pyspark", project_root=current_dir\n) as datajob_stack:\n\n pyspark_job = GlueJob(\n datajob_stack=datajob_stack,\n name="pyspark-job",\n job_path="glue_job/glue_pyspark_example.py",\n job_type="glueetl",\n glue_version="2.0", # we only support glue 2.0\n python_version="3",\n worker_type="Standard", # options are Standard / G.1X / G.2X\n number_of_workers=1,\n arguments={\n "--source": f"s3://{datajob_stack.context.data_bucket_name}/raw/iris_dataset.csv",\n "--destination": f"s3://{datajob_stack.context.data_bucket_name}/target/pyspark_job/iris_dataset.parquet",\n },\n )\n```\nfull example can be found in [examples/data_pipeline_pyspark](examples/data_pipeline_pyspark]).\n
\n\n
\nOrchestrate stepfunctions tasks in parallel\n\n```python\n# task1 and task2 are orchestrated in parallel.\n# task3 will only start when both task1 and task2 have succeeded.\n[task1, task2] >> task3\n```\n\n
\n\n
\nOrchestrate 1 stepfunction task\n\nUse the [Ellipsis](https://docs.python.org/dev/library/constants.html#Ellipsis) object to be able to orchestrate 1 job via step functions.\n\n```python\nsome_task >> ...\n```\n\n
\n\n\n# Datajob in depth\n\nThe `datajob_stack` is the instance that will result in a cloudformation stack.\nThe path in `project_root` helps `datajob_stack` locate the root of the project where\nthe setup.py/poetry pyproject.toml file can be found, as well as the `dist/` folder with the wheel of your project .\n\n```python\nimport pathlib\nfrom aws_cdk import core\n\nfrom datajob.datajob_stack import DataJobStack\n\ncurrent_dir = pathlib.Path(__file__).parent.absolute()\napp = core.App()\n\nwith DataJobStack(\n scope=app, id="data-pipeline-pkg", project_root=current_dir\n) as datajob_stack:\n\n ...\n```\n\nWhen __entering the contextmanager__ of DataJobStack:\n\nA [DataJobContext](./datajob/datajob_stack.py#L48) is initialized\nto deploy and run a data pipeline on AWS.\nThe following resources are created:\n1) "data bucket"\n - an S3 bucket that you can use to dump ingested data, dump intermediate results and the final output.\n - you can access the data bucket as a [Bucket](https://docs.aws.amazon.com/cdk/api/latest/python/aws_cdk.aws_s3/Bucket.html) object via ```datajob_stack.context.data_bucket```\n - you can access the data bucket name via ```datajob_stack.context.data_bucket_name```\n2) "deployment bucket"\n - an s3 bucket to deploy code, artifacts, scripts, config, files, ...\n - you can access the deployment bucket as a [Bucket](https://docs.aws.amazon.com/cdk/api/latest/python/aws_cdk.aws_s3/Bucket.html) object via ```datajob_stack.context.deployment_bucket```\n - you can access the deployment bucket name via ```datajob_stack.context.deployment_bucket_name```\n\nwhen __exiting the context manager__ all the resources of our DataJobStack object are created.\n\n
\nWe can write the above example more explicitly...\n\n```python\nimport pathlib\nfrom aws_cdk import core\n\nfrom datajob.datajob_stack import DataJobStack\nfrom datajob.glue.glue_job import GlueJob\nfrom datajob.stepfunctions.stepfunctions_workflow import StepfunctionsWorkflow\n\napp = core.App()\n\ncurrent_dir = pathlib.Path(__file__).parent.absolute()\n\napp = core.App()\n\ndatajob_stack = DataJobStack(scope=app, id="data-pipeline-pkg", project_root=current_dir)\ndatajob_stack.init_datajob_context()\n\ntask1 = GlueJob(datajob_stack=datajob_stack, name="task1", job_path="glue_jobs/task1.py")\ntask2 = GlueJob(datajob_stack=datajob_stack, name="task2", job_path="glue_jobs/task2.py")\n\nwith StepfunctionsWorkflow(datajob_stack=datajob_stack, name="workflow") as step_functions_workflow:\n task1 >> task2\n\ndatajob_stack.create_resources()\napp.synth()\n```\n
\n\n# Ideas\n\nAny suggestions can be shared by starting a [discussion](https://github.com/vincentclaes/datajob/discussions)\n\nThese are the ideas, we find interesting to implement;\n\n- add a time based trigger to the step functions workflow.\n- add an s3 event trigger to the step functions workflow.\n- add a lambda that copies data from one s3 location to another.\n- add an sns that notifies in case of any failure (slack/email)\n- version your data pipeline.\n- cli command to view the logs / glue jobs / s3 bucket\n- implement sagemaker services\n - processing jobs\n - hyperparameter tuning jobs\n - training jobs\n- implement lambda\n- implement ECS Fargate\n- create a serverless UI that follows up on the different pipelines deployed on possibly different AWS accounts using Datajob\n\n> [Feedback](https://github.com/vincentclaes/datajob/discussions) is much appreciated!\n', + 'author': 'Vincent Claes', + 'author_email': 'vincent.v.claes@gmail.com', + 'maintainer': None, + 'maintainer_email': None, + 'url': 'https://github.com/vincentclaes/datajob', + 'packages': packages, + 'package_data': package_data, + 'install_requires': install_requires, + 'entry_points': entry_points, + 'python_requires': '>=3.6.1,<4.0.0', +} + + +setup(**setup_kwargs) + From 8e420391a25572341e1d6bcbee199ae970ab926e Mon Sep 17 00:00:00 2001 From: Vincent Claes Date: Thu, 10 Jun 2021 13:20:04 +0200 Subject: [PATCH 2/6] remove dephel from deps --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 10d4d70..23374f7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -17,7 +17,6 @@ python = "^3.6.1" stepfunctions = "^1.1.2" contextvars = "^2.4" typer = "^0.3.2" -dephell = "^0.8.3" aws-empty-bucket = "^2.4.0" "aws-cdk.core" = "^1.87.1" "aws-cdk.aws-glue" = "^1.87.1" From 72cddc0da5d9bd356b259de3465e082989981b86 Mon Sep 17 00:00:00 2001 From: Vincent Claes Date: Fri, 18 Jun 2021 15:49:10 +0200 Subject: [PATCH 3/6] implement notification --- 
.pre-commit-config.yaml | 4 +- datajob/datajob_base.py | 11 +- datajob/datajob_stack.py | 6 +- datajob/sns/__init__.py | 0 datajob/sns/sns.py | 67 ++ .../stepfunctions/stepfunctions_workflow.py | 64 ++ .../test_stepfunctions_workflow.py | 48 +- poetry.lock | 762 +++++------------- pyproject.toml | 4 +- setup.py | 44 - 10 files changed, 380 insertions(+), 630 deletions(-) create mode 100644 datajob/sns/__init__.py create mode 100644 datajob/sns/sns.py delete mode 100644 setup.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 327253a..6e6e4e9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,12 +2,12 @@ # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.3.0 + rev: v4.0.1 hooks: - id: check-yaml - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/psf/black - rev: 19.3b0 + rev: 21.6b0 hooks: - id: black diff --git a/datajob/datajob_base.py b/datajob/datajob_base.py index ba2820e..7b35c62 100644 --- a/datajob/datajob_base.py +++ b/datajob/datajob_base.py @@ -13,13 +13,14 @@ def __init__(self, datajob_stack, name, **kwargs): assert isinstance( datajob_stack, DataJobStack ), f"we expect the scope argument to be of type {DataJobStack}" + self.datajob_stack = datajob_stack self.name = name - self.project_root = datajob_stack.project_root - self.stage = datajob_stack.stage - self.unique_name = f"{datajob_stack.unique_stack_name}-{self.name}" - self.context = datajob_stack.context + self.project_root = self.datajob_stack.project_root + self.stage = self.datajob_stack.stage + self.unique_name = f"{self.datajob_stack.unique_stack_name}-{self.name}" + self.context = self.datajob_stack.context logger.info(f"adding job {self} to stack workflow resources") - datajob_stack.resources.append(self) + self.datajob_stack.resources.append(self) @abstractmethod def create(self): diff --git a/datajob/datajob_stack.py b/datajob/datajob_stack.py 
index ccf2509..cafff00 100644 --- a/datajob/datajob_stack.py +++ b/datajob/datajob_stack.py @@ -33,8 +33,10 @@ def __init__( self.scope = scope self.stage = self.get_stage(stage) self.unique_stack_name = self._create_unique_stack_name(id, self.stage) - env = DataJobStack._create_environment_object(account=account, region=region) - super().__init__(scope=scope, id=self.unique_stack_name, env=env, **kwargs) + self.env = DataJobStack._create_environment_object( + account=account, region=region + ) + super().__init__(scope=scope, id=self.unique_stack_name, env=self.env, **kwargs) self.project_root = project_root self.include_folder = include_folder self.resources = [] diff --git a/datajob/sns/__init__.py b/datajob/sns/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/datajob/sns/sns.py b/datajob/sns/sns.py new file mode 100644 index 0000000..8438057 --- /dev/null +++ b/datajob/sns/sns.py @@ -0,0 +1,67 @@ +from typing import Union + +from datajob.datajob_base import DataJobBase +from aws_cdk import core +from aws_cdk import aws_sns +from aws_cdk import aws_sns_subscriptions +from aws_cdk.core import Arn, ArnComponents + + +class SnsTopic(DataJobBase): + def __init__( + self, + datajob_stack: core.Construct, + name: str, + notification: Union[str, list], + **kwargs + ): + """ + :param datajob_stack: aws cdk core construct object. + :param name: name for the SNS Topic. + :param notification: email address as string or list of email addresses to be subscribed. + :param kwargs: + """ + super().__init__(datajob_stack, name, **kwargs) + self.notification = notification + self.sns_topic = None + + def create(self): + self.sns_topic = aws_sns.Topic( + scope=self, + id=self.unique_name, + display_name=self.unique_name, + topic_name=self.unique_name, + ) + self.add_email_subscription() + + def add_email_subscription(self) -> None: + """ + Add an email or a list of emails as subscribers to a topic. 
+ :param sns_topic: an SNS Topic instance of aws cdk + :param notification: email address as string or list of email addresses to be subscribed. + :return: None + """ + if isinstance(self.notification, list): + for email in self.notification: + self.sns_topic.add_subscription( + aws_sns_subscriptions.EmailSubscription(email) + ) + else: + self.sns_topic.add_subscription( + aws_sns_subscriptions.EmailSubscription(self.notification) + ) + + def get_topic_arn(self) -> str: + """ + The ARN will be formatted as follows: + arn:{partition}:{service}:{region}:{account}:{resource}{sep}{resource-name} + :return: return a well formatted arn string + """ + arn_components = ArnComponents( + partition="aws", + service="sns", + region=self.datajob_stack.env.region, + account=self.datajob_stack.env.account, + resource=self.unique_name, + ) + return Arn.format(components=arn_components, stack=self.datajob_stack) diff --git a/datajob/stepfunctions/stepfunctions_workflow.py b/datajob/stepfunctions/stepfunctions_workflow.py index 1a9575a..bd95d6e 100644 --- a/datajob/stepfunctions/stepfunctions_workflow.py +++ b/datajob/stepfunctions/stepfunctions_workflow.py @@ -2,12 +2,15 @@ import tempfile import uuid from pathlib import Path +from typing import Union import boto3 import contextvars from aws_cdk import aws_iam as iam from aws_cdk import cloudformation_include as cfn_inc from aws_cdk import core +from stepfunctions.steps import Catch, Pass, Fail +from stepfunctions.steps.service import SnsPublishStep from stepfunctions import steps from stepfunctions.steps.compute import GlueStartJobRunStep from stepfunctions.steps.states import Parallel @@ -15,6 +18,7 @@ from datajob import logger from datajob.datajob_base import DataJobBase +from datajob.sns.sns import SnsTopic __workflow = contextvars.ContextVar("workflow") @@ -36,6 +40,7 @@ def __init__( self, datajob_stack: core.Construct, name: str, + notification: Union[str, list] = None, role: iam.Role = None, region: str = None, **kwargs, 
@@ -53,6 +58,7 @@ def __init__( self.region = ( region if region is not None else os.environ.get("AWS_DEFAULT_REGION") ) + self.notification = self._setup_notification(notification) def add_task(self, task_other): """add a task to the workflow we would like to orchestrate.""" @@ -87,6 +93,9 @@ def _build_workflow(self): f"creating a chain from all the different steps. \n {self.chain_of_tasks}" ) workflow_definition = steps.Chain(self.chain_of_tasks) + workflow_definition = self._integrate_notification_in_workflow( + workflow_definition=workflow_definition + ) logger.debug(f"creating a workflow with name {self.unique_name}") self.client = boto3.client("stepfunctions") self.workflow = Workflow( @@ -104,6 +113,61 @@ def create(self): text_file.write(self.workflow.get_cloudformation_template()) cfn_inc.CfnInclude(self, self.unique_name, template_file=sfn_cf_file_path) + def _setup_notification( + self, notification: Union[str, list] + ) -> Union[SnsTopic, None]: + """Create a SnsTopic if the notification parameter is defined. + + :param notification: email address as string or list of email addresses to be subscribed. + :return: + """ + if notification is not None: + name = f"{self.name}-notification" + return SnsTopic(self.datajob_stack, name, notification) + + def _integrate_notification_in_workflow( + self, workflow_definition: steps.Chain + ) -> steps.Chain: + """If a notification is defined we configure an SNS with email subscription to alert the user + if the stepfunctions workflow failed or succeeded. + + :param workflow_definition: the workflow definition that contains all the steps we want to execute. + :return: if notification is set, we adapt the workflow to include an SnsPublishStep on failure or on success. + If notification is not set, we return the workflow as we received it. + """ + if self.notification: + logger.debug( + "A notification is configured, " + "implementing a notification on Error or when the stepfunctions workflow succeeds." 
+ ) + failure_notification = SnsPublishStep( + "FailureNotification", + parameters={ + "TopicArn": self.notification.get_topic_arn(), + "Message": f"Stepfunctions workflow {self.unique_name} Failed.", + }, + ) + pass_notification = SnsPublishStep( + "SuccessNotification", + parameters={ + "TopicArn": self.notification.get_topic_arn(), + "Message": f"Stepfunctions workflow {self.unique_name} Succeeded.", + }, + ) + + catch_error = Catch( + error_equals=["States.ALL"], next_step=failure_notification + ) + workflow_with_notification = Parallel(state_id="notification") + workflow_with_notification.add_branch(workflow_definition) + workflow_with_notification.add_catch(catch_error) + workflow_with_notification.next(pass_notification) + return steps.Chain([workflow_with_notification]) + logger.debug( + "No notification is configured, returning the workflow definition." + ) + return workflow_definition + def __enter__(self): """first steps we have to do when entering the context manager.""" logger.info(f"creating step functions workflow for {self.unique_name}") diff --git a/datajob_tests/stepfunctions/test_stepfunctions_workflow.py b/datajob_tests/stepfunctions/test_stepfunctions_workflow.py index b004627..8f26a77 100644 --- a/datajob_tests/stepfunctions/test_stepfunctions_workflow.py +++ b/datajob_tests/stepfunctions/test_stepfunctions_workflow.py @@ -1,6 +1,9 @@ +import json import os import unittest +import io +import yaml from aws_cdk import core from moto import mock_stepfunctions from stepfunctions.steps.compute import GlueStartJobRunStep @@ -82,7 +85,9 @@ def test_create_tasks_for_orchestration_starts_with_parallel_flow_successfully( isinstance(a_step_functions_workflow.chain_of_tasks[1], GlueStartJobRunStep) ) - def test_orchestrate_1_task_successfully(self,): + def test_orchestrate_1_task_successfully( + self, + ): task1 = stepfunctions_workflow.task(SomeMockedClass("task1")) djs = DataJobStack( scope=self.app, @@ -98,3 +103,44 @@ def 
test_orchestrate_1_task_successfully(self,): self.assertTrue( isinstance(a_step_functions_workflow.chain_of_tasks[0], GlueStartJobRunStep) ) + + @mock_stepfunctions + def test_create_workflow_with_notification_successfully(self): + task1 = stepfunctions_workflow.task(SomeMockedClass("task1")) + task2 = stepfunctions_workflow.task(SomeMockedClass("task2")) + + djs = DataJobStack( + scope=self.app, + id="a-unique-name-3", + stage="stage", + project_root="sampleproject/", + region="eu-west-1", + account="3098726354", + ) + with StepfunctionsWorkflow( + djs, "some-name", notification="email@domain.com" + ) as a_step_functions_workflow: + task1 >> task2 + + with io.StringIO() as f: + f.write(a_step_functions_workflow.workflow.get_cloudformation_template()) + f.seek(0) + cf_template = yaml.load(f) + + sfn_workflow = json.loads( + cf_template.get("Resources") + .get("StateMachineComponent") + .get("Properties") + .get("DefinitionString") + ) + # we expect two notifications; 1 for success and one for failure + self.assertTrue("SuccessNotification" in sfn_workflow.get("States").keys()) + self.assertTrue("FailureNotification" in sfn_workflow.get("States").keys()) + # there is a catch statement in the statemachine + self.assertTrue( + "Catch" in sfn_workflow.get("States").get("notification").keys() + ) + # when implementing a notification we expect a Parallel branch + self.assertEqual( + sfn_workflow.get("States").get("notification").get("Type"), "Parallel" + ) diff --git a/poetry.lock b/poetry.lock index f92eab8..8b7962c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,18 +1,3 @@ -[[package]] -name = "aiohttp" -version = "2.3.10" -description = "Async http client/server framework (asyncio)" -category = "main" -optional = false -python-versions = ">=3.4.2" - -[package.dependencies] -async-timeout = ">=1.2.0" -chardet = "*" -idna-ssl = ">=1.0.0" -multidict = ">=4.0.0" -yarl = ">=1.0.0" - [[package]] name = "appdirs" version = "1.4.4" @@ -21,14 +6,6 @@ category = "dev" optional 
= false python-versions = "*" -[[package]] -name = "async-timeout" -version = "3.0.1" -description = "Timeout context manager for asyncio programs" -category = "main" -optional = false -python-versions = ">=3.5.3" - [[package]] name = "atomicwrites" version = "1.4.0" @@ -2778,6 +2755,31 @@ future = "*" jsonpickle = "*" wrapt = "*" +[[package]] +name = "black" +version = "21.6b0" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +appdirs = "*" +click = ">=7.1.2" +dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} +mypy-extensions = ">=0.4.3" +pathspec = ">=0.8.1,<1" +regex = ">=2020.1.8" +toml = ">=0.10.1" +typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\""} +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"] +python2 = ["typed-ast (>=1.4.2)"] +uvloop = ["uvloop (>=0.15.2)"] + [[package]] name = "boto" version = "2.49.0" @@ -2840,19 +2842,11 @@ attrs = ">=20.1.0" [package.extras] dev = ["bumpversion", "wheel", "watchdog", "flake8", "tox", "coverage", "sphinx", "pytest", "hypothesis", "pendulum", "isort", "black"] -[[package]] -name = "cerberus" -version = "1.3.2" -description = "Lightweight, extensible schema and data validation tool for Python dictionaries." -category = "main" -optional = false -python-versions = ">=2.7" - [[package]] name = "certifi" version = "2020.12.5" description = "Python package for providing Mozilla's CA Bundle." 
-category = "main" +category = "dev" optional = false python-versions = "*" @@ -2894,7 +2888,7 @@ six = ">=1.11,<2.0" name = "chardet" version = "4.0.0" description = "Universal encoding detector for Python 2 and 3" -category = "main" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" @@ -2975,198 +2969,6 @@ category = "main" optional = false python-versions = ">=3.6, <3.7" -[[package]] -name = "dephell" -version = "0.8.3" -description = "Dependency resolution for Python" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -aiohttp = "*" -attrs = ">=19.2.0" -cerberus = ">=1.3" -certifi = "*" -dephell-archive = ">=0.1.5" -dephell-argparse = ">=0.1.1" -dephell-changelogs = "*" -dephell-discover = ">=0.2.6" -dephell-licenses = ">=0.1.6" -dephell-links = ">=0.1.4" -dephell-markers = ">=1.0.0" -dephell-pythons = ">=0.1.11" -dephell-setuptools = ">=0.2.1" -dephell-shells = ">=0.1.3" -dephell-specifier = ">=0.1.7" -dephell-venvs = ">=0.1.16" -dephell-versioning = "*" -jinja2 = "*" -m2r = "*" -packaging = "*" -requests = "*" -"ruamel.yaml" = "*" -tomlkit = "*" -yaspin = "*" - -[package.extras] -full = ["aiofiles", "appdirs", "autopep8", "bowler", "colorama", "docker", "dockerpty", "fissix", "graphviz", "html5lib", "pygments", "python-gnupg", "tabulate", "yapf"] -tests = ["aioresponses", "pytest", "requests-mock"] -dev = ["aioresponses", "alabaster", "flake8-isort", "isort", "pygments-github-lexers", "pytest", "recommonmark", "requests-mock", "sphinx"] -docs = ["alabaster", "pygments-github-lexers", "recommonmark", "sphinx"] - -[[package]] -name = "dephell-archive" -version = "0.1.7" -description = "pathlib for archives" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -attrs = "*" - -[[package]] -name = "dephell-argparse" -version = "0.1.3" -description = "Argparse on steroids: groups, commands, colors." 
-category = "main" -optional = false -python-versions = ">=3.5" - -[[package]] -name = "dephell-changelogs" -version = "0.0.1" -description = "Find changelog for github repository, local dir, parse changelog" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -requests = "*" - -[package.extras] -dev = ["pytest", "pytest-xdist"] - -[[package]] -name = "dephell-discover" -version = "0.2.10" -description = "Find project modules and data files (packages and package_data for setup.py)." -category = "main" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -attrs = "*" - -[[package]] -name = "dephell-licenses" -version = "0.1.7" -description = "Get info about OSS licenses" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -attrs = "*" -requests = "*" - -[[package]] -name = "dephell-links" -version = "0.1.5" -description = "Parse dependency links" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -attrs = "*" - -[[package]] -name = "dephell-markers" -version = "1.0.3" -description = "Work with environment markers (PEP-496)" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -attrs = "*" -dephell-specifier = "*" -packaging = "*" - -[[package]] -name = "dephell-pythons" -version = "0.1.15" -description = "Work with python versions" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -attrs = "*" -dephell-specifier = "*" -packaging = "*" - -[[package]] -name = "dephell-setuptools" -version = "0.2.4" -description = "Read metainfo from setup.py" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.extras] -dev = ["mypy", "pkginfo", "pytest"] - -[[package]] -name = "dephell-shells" -version = "0.1.5" -description = "activate virtual environment for current shell" -category = "main" -optional = false -python-versions = ">=3.6" - 
-[package.dependencies] -attrs = "*" -pexpect = "*" -shellingham = "*" - -[[package]] -name = "dephell-specifier" -version = "0.2.2" -description = "Work with version specifiers." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -packaging = ">=17.1" - -[[package]] -name = "dephell-venvs" -version = "0.1.18" -description = "Manage virtual environments" -category = "main" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -attrs = "*" -dephell-pythons = "*" -requests = "*" - -[[package]] -name = "dephell-versioning" -version = "0.1.2" -description = "Library for bumping project version like a pro" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -packaging = "*" - [[package]] name = "distlib" version = "0.3.1" @@ -3193,14 +2995,6 @@ websocket-client = ">=0.32.0" ssh = ["paramiko (>=2.4.2)"] tls = ["pyOpenSSL (>=17.5.0)", "cryptography (>=1.3.4)", "idna (>=2.0.0)"] -[[package]] -name = "docutils" -version = "0.16" -description = "Docutils -- Python Documentation Utilities" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - [[package]] name = "ecdsa" version = "0.14.1" @@ -3243,21 +3037,10 @@ license = ["editdistance"] name = "idna" version = "2.10" description = "Internationalized Domain Names in Applications (IDNA)" -category = "main" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -[[package]] -name = "idna-ssl" -version = "1.1.0" -description = "Patch ssl.match_hostname for Unicode(idna) domains support" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -idna = ">=2.0" - [[package]] name = "immutables" version = "0.14" @@ -3309,7 +3092,7 @@ python-versions = "*" name = "jinja2" version = "2.11.2" description = "A very fast and expressive template engine." 
-category = "main" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" @@ -3406,34 +3189,14 @@ six = ">=1.11.0" format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] -[[package]] -name = "m2r" -version = "0.2.1" -description = "Markdown and reStructuredText in a single file." -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -docutils = "*" -mistune = "*" - [[package]] name = "markupsafe" version = "1.1.1" description = "Safely add untrusted strings to HTML/XML markup." -category = "main" +category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -[[package]] -name = "mistune" -version = "0.8.4" -description = "The fastest markdown parser in pure Python" -category = "main" -optional = false -python-versions = "*" - [[package]] name = "mock" version = "4.0.3" @@ -3485,6 +3248,7 @@ PyYAML = ">=5.1" requests = ">=2.5" responses = ">=0.9.0" six = ">1.9" +sshpubkeys = {version = ">=3.1.0", markers = "python_version > \"3\""} werkzeug = "*" xmltodict = "*" zipp = "*" @@ -3504,12 +3268,12 @@ server = ["cryptography (>=2.3.0)", "PyYAML (>=5.1)", "python-jose[cryptography] xray = ["aws-xray-sdk (>=0.93,!=0.96)"] [[package]] -name = "multidict" -version = "5.1.0" -description = "multidict implementation" -category = "main" +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" optional = false -python-versions = ">=3.6" +python-versions = "*" [[package]] name = "nodeenv" @@ -3539,15 +3303,12 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" pyparsing = ">=2.0.2" [[package]] -name = "pexpect" -version = "4.8.0" -description = "Pexpect allows easy control of interactive console applications." 
-category = "main" +name = "pathspec" +version = "0.8.1" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" optional = false -python-versions = "*" - -[package.dependencies] -ptyprocess = ">=0.5" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pluggy" @@ -3604,14 +3365,6 @@ python-versions = "*" protobuf = ">=2.3.0" six = "*" -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -category = "main" -optional = false -python-versions = "*" - [[package]] name = "publication" version = "0.0.3" @@ -3745,11 +3498,19 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +[[package]] +name = "regex" +version = "2021.4.4" +description = "Alternative regular expression module, to replace re." +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "requests" version = "2.25.1" description = "Python HTTP for Humans." 
-category = "main" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" @@ -3808,29 +3569,6 @@ python-versions = ">=3.5, <4" [package.dependencies] pyasn1 = ">=0.1.3" -[[package]] -name = "ruamel.yaml" -version = "0.16.12" -description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -category = "main" -optional = false -python-versions = "*" - -[package.dependencies] -"ruamel.yaml.clib" = {version = ">=0.1.2", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.9\""} - -[package.extras] -docs = ["ryd"] -jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] - -[[package]] -name = "ruamel.yaml.clib" -version = "0.2.2" -description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -category = "main" -optional = false -python-versions = "*" - [[package]] name = "s3transfer" version = "0.3.4" @@ -3878,14 +3616,6 @@ python-versions = ">=3.6" [package.dependencies] numpy = ">=1.14.5" -[[package]] -name = "shellingham" -version = "1.3.2" -description = "Tool to Detect Surrounding Shell" -category = "main" -optional = false -python-versions = "!=3.0,!=3.1,!=3.2,!=3.3,>=2.6" - [[package]] name = "six" version = "1.15.0" @@ -3902,6 +3632,21 @@ category = "main" optional = false python-versions = ">=2.7" +[[package]] +name = "sshpubkeys" +version = "3.3.1" +description = "SSH public key parser" +category = "dev" +optional = false +python-versions = ">=3" + +[package.dependencies] +cryptography = ">=2.1.4" +ecdsa = ">=0.13" + +[package.extras] +dev = ["twine", "wheel", "yapf"] + [[package]] name = "stepfunctions" version = "1.1.2" @@ -3927,12 +3672,12 @@ optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] -name = "tomlkit" -version = "0.7.0" -description = "Style preserving TOML library" -category = "main" +name = "typed-ast" +version = "1.4.3" +description = "a fork of 
Python 2 and 3 ast modules with type comment support" +category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "*" [[package]] name = "typer" @@ -4031,27 +3776,6 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -[[package]] -name = "yarl" -version = "1.6.3" -description = "Yet another URL library" -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" -typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} - -[[package]] -name = "yaspin" -version = "1.3.0" -description = "Yet Another Terminal Spinner" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - [[package]] name = "zipp" version = "3.4.0" @@ -4066,42 +3790,14 @@ testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake [metadata] lock-version = "1.1" -python-versions = "^3.6.1" -content-hash = "a555a81e9a2897171e163df20d8da059df47cb265470bc410d0079aeeb423d55" +python-versions = "^3.6.2" +content-hash = "92321970f85579982505ad2a4a964e053ccad67d1d773eb2ded97b2a5b5002dc" [metadata.files] -aiohttp = [ - {file = "aiohttp-2.3.10-cp34-cp34m-macosx_10_10_x86_64.whl", hash = "sha256:834f687b806fbf49cb135b5a208b5c27338e19c219d6e09e9049936e01e8bea8"}, - {file = "aiohttp-2.3.10-cp34-cp34m-macosx_10_11_x86_64.whl", hash = "sha256:6b8c5a00432b8a5a083792006e8fdfb558b8b10019ce254200855264d3a25895"}, - {file = "aiohttp-2.3.10-cp34-cp34m-macosx_10_12_x86_64.whl", hash = "sha256:7b407c22b0ab473ffe0a7d3231f2084a8ae80fdb64a31842b88d57d6b7bdab7c"}, - {file = "aiohttp-2.3.10-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:14821eb8613bfab9118be3c55afc87bf4cef97689896fa0874c6877b117afbeb"}, - {file = "aiohttp-2.3.10-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:8f32a4e157bad9c60ebc38c3bb93fcc907a020b017ddf8f7ab1580390e21940e"}, - {file 
= "aiohttp-2.3.10-cp34-cp34m-win32.whl", hash = "sha256:82a9068d9cb15eb2d99ecf39f8d56b4ed9f931a77a3622a0de747465fd2a7b96"}, - {file = "aiohttp-2.3.10-cp34-cp34m-win_amd64.whl", hash = "sha256:7ac6378ae364d8e5e5260c7224ea4a1965cb6f4719f15d0552349d0b0cc93953"}, - {file = "aiohttp-2.3.10-cp35-cp35m-macosx_10_10_x86_64.whl", hash = "sha256:5a952d4af7de5f78dfb3206dbc352717890b37d447f0bbd4b5969b3c8bb713af"}, - {file = "aiohttp-2.3.10-cp35-cp35m-macosx_10_11_x86_64.whl", hash = "sha256:b25c7720c495048ed658086a29925ab485ac7ececf1b346f2b459e5431d85016"}, - {file = "aiohttp-2.3.10-cp35-cp35m-macosx_10_12_x86_64.whl", hash = "sha256:528b0b811b6260a79222b055664b82093d01f35fe5c82521d8659cb2b28b8044"}, - {file = "aiohttp-2.3.10-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:46ace48789865a89992419205024ae451d577876f9919fbb0f22f71189822dea"}, - {file = "aiohttp-2.3.10-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:5436ca0ed752bb05a399fc07dc86dc23c756db523a3b7d5da46a457eacf4c4b5"}, - {file = "aiohttp-2.3.10-cp35-cp35m-win32.whl", hash = "sha256:f5e7d41d924a1d5274060c467539ee0c4f3bab318c1671ad65abd91f6b637baf"}, - {file = "aiohttp-2.3.10-cp35-cp35m-win_amd64.whl", hash = "sha256:a8c12f3184c7cad8f66cae6c945d2c97e598b0cb7afd655a5b9471475e67f30e"}, - {file = "aiohttp-2.3.10-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:756fc336a29c551b02252685f01bc87116bc9b04bbd02c1a6b8a96b3c6ad713b"}, - {file = "aiohttp-2.3.10-cp36-cp36m-macosx_10_11_x86_64.whl", hash = "sha256:cf790e61c2af0278f39dcedad9a22532bf81fb029c2cd73b1ceba7bea062c908"}, - {file = "aiohttp-2.3.10-cp36-cp36m-macosx_10_12_x86_64.whl", hash = "sha256:44c9cf24e63576244c13265ef0786b56d6751f5fb722225ecc021d6ecf91b4d2"}, - {file = "aiohttp-2.3.10-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:ef1a36a16e72b6689ce0a6c7fc6bd88837d8fef4590b16bd72817644ae1f414d"}, - {file = "aiohttp-2.3.10-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3a4cdb9ca87c099d8ba5eb91cb8f000b60c21f8c1b50c75e04e8777e903bd278"}, - {file = 
"aiohttp-2.3.10-cp36-cp36m-win32.whl", hash = "sha256:f72bb19cece43483171264584bbaaf8b97717d9c0f244d1ef4a51df1cdb34085"}, - {file = "aiohttp-2.3.10-cp36-cp36m-win_amd64.whl", hash = "sha256:c77e29243a79e376a1b51d71a13df4a61bc54fd4d9597ce3790b8d82ec6eb44d"}, - {file = "aiohttp-2.3.10.tar.gz", hash = "sha256:8adda6583ba438a4c70693374e10b60168663ffa6564c5c75d3c7a9055290964"}, -] appdirs = [ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, ] -async-timeout = [ - {file = "async-timeout-3.0.1.tar.gz", hash = "sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f"}, - {file = "async_timeout-3.0.1-py3-none-any.whl", hash = "sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"}, -] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, @@ -4763,6 +4459,10 @@ aws-xray-sdk = [ {file = "aws-xray-sdk-2.6.0.tar.gz", hash = "sha256:abf5b90f740e1f402e23414c9670e59cb9772e235e271fef2bce62b9100cbc77"}, {file = "aws_xray_sdk-2.6.0-py2.py3-none-any.whl", hash = "sha256:076f7c610cd3564bbba3507d43e328fb6ff4a2e841d3590f39b2c3ce99d41e1d"}, ] +black = [ + {file = "black-21.6b0-py3-none-any.whl", hash = "sha256:dfb8c5a069012b2ab1e972e7b908f5fb42b6bbabcba0a788b86dc05067c7d9c7"}, + {file = "black-21.6b0.tar.gz", hash = "sha256:dc132348a88d103016726fe360cb9ede02cecf99b76e3660ce6c596be132ce04"}, +] boto = [ {file = "boto-2.49.0-py2.py3-none-any.whl", hash = "sha256:147758d41ae7240dc989f0039f27da8ca0d53734be0eb869ef16e3adcfa462e8"}, {file = "boto-2.49.0.tar.gz", hash = "sha256:ea0d3b40a2d852767be77ca343b58a9e3a4b00d9db440efb8da74b4e58025e5a"}, @@ -4781,9 
+4481,6 @@ cattrs = [ {file = "cattrs-1.1.2-py3-none-any.whl", hash = "sha256:967ce8f99b79f112a500fc03d02c4da669966055ea190b0c59a023af0ae33e5f"}, {file = "cattrs-1.1.2.tar.gz", hash = "sha256:a5873cd4745a74388557730247b4bb005d762f8aba0ab7aa55bcbd190bdf3322"}, ] -cerberus = [ - {file = "Cerberus-1.3.2.tar.gz", hash = "sha256:302e6694f206dd85cb63f13fd5025b31ab6d38c99c50c6d769f8fa0b0f299589"}, -] certifi = [ {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, @@ -4822,6 +4519,7 @@ cffi = [ {file = "cffi-1.14.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909"}, {file = "cffi-1.14.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd"}, {file = "cffi-1.14.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a"}, + {file = "cffi-1.14.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:7ef7d4ced6b325e92eb4d3502946c78c5367bc416398d387b39591532536734e"}, {file = "cffi-1.14.4-cp39-cp39-win32.whl", hash = "sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3"}, {file = "cffi-1.14.4-cp39-cp39-win_amd64.whl", hash = "sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b"}, {file = "cffi-1.14.4.tar.gz", hash = "sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c"}, @@ -4877,62 +4575,6 @@ dataclasses = [ {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, ] -dephell = [ - {file = "dephell-0.8.3-py3-none-any.whl", hash = 
"sha256:3ca3661e2a353b5c67c77034b69b379e360d4c70ce562e8161db32d39064be5a"}, - {file = "dephell-0.8.3.tar.gz", hash = "sha256:a9fcc528a0c6f9f5d721292bdf846e5338e4dca7cd6fef1551fbe71564dfe61e"}, -] -dephell-archive = [ - {file = "dephell-archive-0.1.7.tar.gz", hash = "sha256:bb263492a7d430f9e04cef9a0237b7752cc797ab364bf35e70196af09c73ea37"}, - {file = "dephell_archive-0.1.7-py3-none-any.whl", hash = "sha256:64a688dd8acb780f7d56cdae4622fa01d1e5910fd65788974b3f70fa9a1e517a"}, -] -dephell-argparse = [ - {file = "dephell_argparse-0.1.3-py3-none-any.whl", hash = "sha256:e37a52c511b53e9d6107b606088664754b4b4d9e734578b333e68c46e4ab45b7"}, - {file = "dephell_argparse-0.1.3.tar.gz", hash = "sha256:2ab9b2441f808bb11c338c4849d22ded898cde8325946800ac9e39d2b138735d"}, -] -dephell-changelogs = [ - {file = "dephell_changelogs-0.0.1-py3-none-any.whl", hash = "sha256:963d31346790a3aacc3409bbc7cb0b44cdc0e29c167eec196fb49a131c3035b8"}, - {file = "dephell_changelogs-0.0.1.tar.gz", hash = "sha256:e639a3d08d389e22fbac0cc64181dbe93c4b4ba9f0134e273e6dd3e26ae70b21"}, -] -dephell-discover = [ - {file = "dephell_discover-0.2.10-py3-none-any.whl", hash = "sha256:abf190e9707d4a88f14e91be1f80e996e195b20b5400da2362e98cf19e59a1e4"}, - {file = "dephell_discover-0.2.10.tar.gz", hash = "sha256:a2ad414e5e0fe16c82c537d6a3198afd9818c0c010760eccb23e2d60e5b66df6"}, -] -dephell-licenses = [ - {file = "dephell-licenses-0.1.7.tar.gz", hash = "sha256:f175cec822a32bda5b56442f48dae39efbb5c3851275ecd41cfd7e849ddd2ea6"}, - {file = "dephell_licenses-0.1.7-py3-none-any.whl", hash = "sha256:b0b6c93779c4a8d9a82710ef2d5d0fab72e013f335962dc7363831af48570db5"}, -] -dephell-links = [ - {file = "dephell_links-0.1.5-py3-none-any.whl", hash = "sha256:a86a08fb42da63d903ae3fee9f9e2491be602321204c0df5b53e33cb19ac4dec"}, - {file = "dephell_links-0.1.5.tar.gz", hash = "sha256:28d694142e2827a59d2c301e7185afb52fb8acdb950b1da38308d69e43418eaa"}, -] -dephell-markers = [ - {file = "dephell_markers-1.0.3-py3-none-any.whl", hash = 
"sha256:54ad6807b087d6c9171efc2d94eda3a9e3cad7ea2ca4b27186789d455a6c730a"}, - {file = "dephell_markers-1.0.3.tar.gz", hash = "sha256:525e17914e705acf8652dd8681fccdec912432a747d8def4720f49416817f2d4"}, -] -dephell-pythons = [ - {file = "dephell_pythons-0.1.15-py3-none-any.whl", hash = "sha256:03132d083d0369683b87d03767dc0f0f88b8d92d5cf19cfdb36d8845b70ecdb2"}, - {file = "dephell_pythons-0.1.15.tar.gz", hash = "sha256:804c29afa2147322aa23e791f591d0204fd1e9983afa7d91e1d1452fc7be1c5c"}, -] -dephell-setuptools = [ - {file = "dephell_setuptools-0.2.4-py3-none-any.whl", hash = "sha256:275f9bec4b276614939ac9efa732a0ae6aef06ae63e3b62371d0f15a19299208"}, - {file = "dephell_setuptools-0.2.4.tar.gz", hash = "sha256:663629e1ebf7b20bf7e372ee2a2e7ebf1a15aeb3bc6d46ad32e1bcb21044ca29"}, -] -dephell-shells = [ - {file = "dephell_shells-0.1.5-py3-none-any.whl", hash = "sha256:3bdb8aba72640c51259dc5cb0ee40c4cd948cb644e5ceedd7e725766575a5225"}, - {file = "dephell_shells-0.1.5.tar.gz", hash = "sha256:77150b732db135d436f41c2c6f12694e6058a8609214117ee80f6c40234ac2d5"}, -] -dephell-specifier = [ - {file = "dephell_specifier-0.2.2-py3-none-any.whl", hash = "sha256:021ad2ab3f3f130b5ac5cefa554c12f0e2dbb35d5d52ad9474a1f2c8b420f7c2"}, - {file = "dephell_specifier-0.2.2.tar.gz", hash = "sha256:b5ec6409a1916980c4861da2cb7538246555bff4b95bef2c952c56bd19eb2de6"}, -] -dephell-venvs = [ - {file = "dephell_venvs-0.1.18-py3-none-any.whl", hash = "sha256:bd3ad440702aa9a9dc21bbab9633537fa395296d40451280d40046d9e3372e6d"}, - {file = "dephell_venvs-0.1.18.tar.gz", hash = "sha256:c7307291b754edba325ab27edeb05d85ee4dd2f1487c48872a1ebfc372bf7a2e"}, -] -dephell-versioning = [ - {file = "dephell_versioning-0.1.2-py3-none-any.whl", hash = "sha256:28f611bd3ec1644e3d6972f901b9aa67a1fe2ed3fe57566f82afd9c43f5a335a"}, - {file = "dephell_versioning-0.1.2.tar.gz", hash = "sha256:9ba7636704af7bd64af5a64ab8efb482c8b0bf4868699722f5e2647763edf8e5"}, -] distlib = [ {file = "distlib-0.3.1-py2.py3-none-any.whl", hash = 
"sha256:8c09de2c67b3e7deef7184574fc060ab8a793e7adbb183d942c389c8b13c52fb"}, {file = "distlib-0.3.1.zip", hash = "sha256:edf6116872c863e1aa9d5bb7cb5e05a022c519a4594dc703843343a9ddd9bff1"}, @@ -4941,10 +4583,6 @@ docker = [ {file = "docker-4.4.1-py2.py3-none-any.whl", hash = "sha256:e455fa49aabd4f22da9f4e1c1f9d16308286adc60abaf64bf3e1feafaed81d06"}, {file = "docker-4.4.1.tar.gz", hash = "sha256:0604a74719d5d2de438753934b755bfcda6f62f49b8e4b30969a4b0a2a8a1220"}, ] -docutils = [ - {file = "docutils-0.16-py2.py3-none-any.whl", hash = "sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"}, - {file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"}, -] ecdsa = [ {file = "ecdsa-0.14.1-py2.py3-none-any.whl", hash = "sha256:e108a5fe92c67639abae3260e43561af914e7fd0d27bae6d2ec1312ae7934dfe"}, {file = "ecdsa-0.14.1.tar.gz", hash = "sha256:64c613005f13efec6541bb0a33290d0d03c27abab5f15fbab20fb0ee162bdd8e"}, @@ -4964,9 +4602,6 @@ idna = [ {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, ] -idna-ssl = [ - {file = "idna-ssl-1.1.0.tar.gz", hash = "sha256:a933e3bb13da54383f9e8f35dc4f9cb9eb9b3b78c6b36f311254d6d0d92c6c7c"}, -] immutables = [ {file = "immutables-0.14-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:860666fab142401a5535bf65cbd607b46bc5ed25b9d1eb053ca8ed9a1a1a80d6"}, {file = "immutables-0.14-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:ce01788878827c3f0331c254a4ad8d9721489a5e65cc43e19c80040b46e0d297"}, @@ -5024,9 +4659,6 @@ jsonschema = [ {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, ] -m2r = [ - {file = 
"m2r-0.2.1.tar.gz", hash = "sha256:bf90bad66cda1164b17e5ba4a037806d2443f2a4d5ddc9f6a5554a0322aaed99"}, -] markupsafe = [ {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"}, {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"}, @@ -5046,26 +4678,41 @@ markupsafe = [ {file = "MarkupSafe-1.1.1-cp35-cp35m-win32.whl", hash = "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1"}, {file = "MarkupSafe-1.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d"}, {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d53bc011414228441014aa71dbec320c66468c1030aae3a6e29778a3382d96e5"}, {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473"}, {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:3b8a6499709d29c2e2399569d96719a1b21dcd94410a586a18526b143ec8470f"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:84dee80c15f1b560d55bcfe6d47b27d070b4681c699c572af2e3c7cc90a3b8e0"}, + {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:b1dba4527182c95a0db8b6060cc98ac49b9e2f5e64320e2b56e47cb2831978c7"}, {file = "MarkupSafe-1.1.1-cp36-cp36m-win32.whl", hash = "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66"}, {file = "MarkupSafe-1.1.1-cp36-cp36m-win_amd64.whl", hash = 
"sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5"}, {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bf5aa3cbcfdf57fa2ee9cd1822c862ef23037f5c832ad09cfea57fa846dec193"}, {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e"}, {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:6fffc775d90dcc9aed1b89219549b329a9250d918fd0b8fa8d93d154918422e1"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:a6a744282b7718a2a62d2ed9d993cad6f5f585605ad352c11de459f4108df0a1"}, + {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:195d7d2c4fbb0ee8139a6cf67194f3973a6b3042d742ebe0a9ed36d8b6f0c07f"}, {file = "MarkupSafe-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2"}, {file = "MarkupSafe-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c"}, {file = "MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15"}, {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2"}, {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:acf08ac40292838b3cbbb06cfe9b2cb9ec78fce8baca31ddb87aaac2e2dc3bc2"}, + {file = 
"MarkupSafe-1.1.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d9be0ba6c527163cbed5e0857c451fcd092ce83947944d6c14bc95441203f032"}, + {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:caabedc8323f1e93231b52fc32bdcde6db817623d33e100708d9a68e1f53b26b"}, {file = "MarkupSafe-1.1.1-cp38-cp38-win32.whl", hash = "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b"}, {file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d73a845f227b0bfe8a7455ee623525ee656a9e2e749e4742706d80a6065d5e2c"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:98bae9582248d6cf62321dcb52aaf5d9adf0bad3b40582925ef7c7f0ed85fceb"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:2beec1e0de6924ea551859edb9e7679da6e4870d32cb766240ce17e0a0ba2014"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:7fed13866cf14bba33e7176717346713881f56d9d2bcebab207f7a036f41b850"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:6f1e273a344928347c1290119b493a1f0303c52f5a5eae5f16d74f48c15d4a85"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:feb7b34d6325451ef96bc0e36e1a6c0c1c64bc1fbec4b854f4529e51887b1621"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-win32.whl", hash = "sha256:22c178a091fc6630d0d045bdb5992d2dfe14e3259760e713c490da5323866c39"}, + {file = "MarkupSafe-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7d644ddb4dbd407d31ffb699f1d140bc35478da613b441c582aeb7c43838dd8"}, {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, ] -mistune = [ - {file = "mistune-0.8.4-py2.py3-none-any.whl", hash = "sha256:88a1051873018da288eee8538d476dffe1262495144b33ecb586c4ab266bb8d4"}, - {file = "mistune-0.8.4.tar.gz", 
hash = "sha256:59a3429db53c50b5c6bcc8a07f8848cb00d7dc8bdb431a4ab41920d201d4756e"}, -] mock = [ {file = "mock-4.0.3-py3-none-any.whl", hash = "sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62"}, {file = "mock-4.0.3.tar.gz", hash = "sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc"}, @@ -5078,44 +4725,9 @@ moto = [ {file = "moto-1.3.16-py2.py3-none-any.whl", hash = "sha256:f51903b6b532f6c887b111b3343f6925b77eef0505a914138d98290cf3526df9"}, {file = "moto-1.3.16.tar.gz", hash = "sha256:6c686b1f117563391957ce47c2106bc3868783d59d0e004d2446dce875bec07f"}, ] -multidict = [ - {file = "multidict-5.1.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224"}, - {file = "multidict-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26"}, - {file = "multidict-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6"}, - {file = "multidict-5.1.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = 
"sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37"}, - {file = "multidict-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5"}, - {file = "multidict-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632"}, - {file = "multidict-5.1.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635"}, - {file = 
"multidict-5.1.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea"}, - {file = "multidict-5.1.0-cp38-cp38-win32.whl", hash = "sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656"}, - {file = "multidict-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3"}, - {file = "multidict-5.1.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda"}, - {file = "multidict-5.1.0-cp39-cp39-win32.whl", hash = "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80"}, - {file = "multidict-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359"}, - {file = "multidict-5.1.0.tar.gz", hash = "sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5"}, +mypy-extensions = [ + {file = 
"mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] nodeenv = [ {file = "nodeenv-1.5.0-py2.py3-none-any.whl", hash = "sha256:5304d424c529c997bc888453aeaa6362d242b6b4631e90f3d4bf1b290f1c84a9"}, @@ -5161,9 +4773,9 @@ packaging = [ {file = "packaging-20.8-py2.py3-none-any.whl", hash = "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858"}, {file = "packaging-20.8.tar.gz", hash = "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093"}, ] -pexpect = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +pathspec = [ + {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, + {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, ] pluggy = [ {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, @@ -5196,10 +4808,6 @@ protobuf = [ protobuf3-to-dict = [ {file = "protobuf3-to-dict-0.1.5.tar.gz", hash = "sha256:1e42c25b5afb5868e3a9b1962811077e492c17557f9c66f0fe40d821375d2b5a"}, ] -ptyprocess = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] publication = [ {file = "publication-0.0.3-py2.py3-none-any.whl", hash = "sha256:0248885351febc11d8a1098d5c8e3ab2dabcf3e8c0c96db1e17ecd12b53afbe6"}, {file = "publication-0.0.3.tar.gz", hash = 
"sha256:68416a0de76dddcdd2930d1c8ef853a743cc96c82416c4e4d3b5d901c6276dc4"}, @@ -5275,22 +4883,73 @@ pyyaml = [ {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541"}, {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0"}, {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc"}, {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6"}, {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] +regex = [ + {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"}, 
+ {file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"}, + {file = "regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"}, + {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"}, + {file = "regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"}, + {file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"}, + {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"}, + {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"}, + {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"}, + {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = 
"sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"}, + {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"}, + {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"}, + {file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"}, + {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, +] requests = [ {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, @@ -5307,36 +4966,6 @@ rsa = [ {file = "rsa-4.7-py3-none-any.whl", hash = 
"sha256:a8774e55b59fd9fc893b0d05e9bfc6f47081f46ff5b46f39ccf24631b7be356b"}, {file = "rsa-4.7.tar.gz", hash = "sha256:69805d6b69f56eb05b62daea3a7dbd7aa44324ad1306445e05da8060232d00f4"}, ] -"ruamel.yaml" = [ - {file = "ruamel.yaml-0.16.12-py2.py3-none-any.whl", hash = "sha256:012b9470a0ea06e4e44e99e7920277edf6b46eee0232a04487ea73a7386340a5"}, - {file = "ruamel.yaml-0.16.12.tar.gz", hash = "sha256:076cc0bc34f1966d920a49f18b52b6ad559fbe656a0748e3535cf7b3f29ebf9e"}, -] -"ruamel.yaml.clib" = [ - {file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:28116f204103cb3a108dfd37668f20abe6e3cafd0d3fd40dba126c732457b3cc"}, - {file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:daf21aa33ee9b351f66deed30a3d450ab55c14242cfdfcd377798e2c0d25c9f1"}, - {file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-win32.whl", hash = "sha256:30dca9bbcbb1cc858717438218d11eafb78666759e5094dd767468c0d577a7e7"}, - {file = "ruamel.yaml.clib-0.2.2-cp27-cp27m-win_amd64.whl", hash = "sha256:f6061a31880c1ed6b6ce341215336e2f3d0c1deccd84957b6fa8ca474b41e89f"}, - {file = "ruamel.yaml.clib-0.2.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:73b3d43e04cc4b228fa6fa5d796409ece6fcb53a6c270eb2048109cbcbc3b9c2"}, - {file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:53b9dd1abd70e257a6e32f934ebc482dac5edb8c93e23deb663eac724c30b026"}, - {file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:839dd72545ef7ba78fd2aa1a5dd07b33696adf3e68fae7f31327161c1093001b"}, - {file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-win32.whl", hash = "sha256:b1e981fe1aff1fd11627f531524826a4dcc1f26c726235a52fcb62ded27d150f"}, - {file = "ruamel.yaml.clib-0.2.2-cp35-cp35m-win_amd64.whl", hash = "sha256:4e52c96ca66de04be42ea2278012a2342d89f5e82b4512fb6fb7134e377e2e62"}, - {file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a873e4d4954f865dcb60bdc4914af7eaae48fb56b60ed6daa1d6251c72f5337c"}, - {file = 
"ruamel.yaml.clib-0.2.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ab845f1f51f7eb750a78937be9f79baea4a42c7960f5a94dde34e69f3cce1988"}, - {file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-win32.whl", hash = "sha256:e9f7d1d8c26a6a12c23421061f9022bb62704e38211fe375c645485f38df34a2"}, - {file = "ruamel.yaml.clib-0.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:2602e91bd5c1b874d6f93d3086f9830f3e907c543c7672cf293a97c3fabdcd91"}, - {file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:44c7b0498c39f27795224438f1a6be6c5352f82cb887bc33d962c3a3acc00df6"}, - {file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8e8fd0a22c9d92af3a34f91e8a2594eeb35cba90ab643c5e0e643567dc8be43e"}, - {file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-win32.whl", hash = "sha256:464e66a04e740d754170be5e740657a3b3b6d2bcc567f0c3437879a6e6087ff6"}, - {file = "ruamel.yaml.clib-0.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:52ae5739e4b5d6317b52f5b040b1b6639e8af68a5b8fd606a8b08658fbd0cab5"}, - {file = "ruamel.yaml.clib-0.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4df5019e7783d14b79217ad9c56edf1ba7485d614ad5a385d1b3c768635c81c0"}, - {file = "ruamel.yaml.clib-0.2.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5254af7d8bdf4d5484c089f929cb7f5bafa59b4f01d4f48adda4be41e6d29f99"}, - {file = "ruamel.yaml.clib-0.2.2-cp38-cp38-win32.whl", hash = "sha256:74161d827407f4db9072011adcfb825b5258a5ccb3d2cd518dd6c9edea9e30f1"}, - {file = "ruamel.yaml.clib-0.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:058a1cc3df2a8aecc12f983a48bda99315cebf55a3b3a5463e37bb599b05727b"}, - {file = "ruamel.yaml.clib-0.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6ac7e45367b1317e56f1461719c853fd6825226f45b835df7436bb04031fd8a"}, - {file = "ruamel.yaml.clib-0.2.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b4b0d31f2052b3f9f9b5327024dc629a253a83d8649d4734ca7f35b60ec3e9e5"}, - {file = "ruamel.yaml.clib-0.2.2.tar.gz", hash = 
"sha256:2d24bd98af676f4990c4d715bcdc2a60b19c56a3fb3a763164d2d8ca0e806ba7"}, -] s3transfer = [ {file = "s3transfer-0.3.4-py2.py3-none-any.whl", hash = "sha256:1e28620e5b444652ed752cf87c7e0cb15b0e578972568c6609f0f18212f259ed"}, {file = "s3transfer-0.3.4.tar.gz", hash = "sha256:7fdddb4f22275cf1d32129e21f056337fd2a80b6ccef1664528145b72c49e6d2"}, @@ -5371,10 +5000,6 @@ scipy = [ {file = "scipy-1.5.4-cp39-cp39-win_amd64.whl", hash = "sha256:cc1f78ebc982cd0602c9a7615d878396bec94908db67d4ecddca864d049112f2"}, {file = "scipy-1.5.4.tar.gz", hash = "sha256:4a453d5e5689de62e5d38edf40af3f17560bfd63c9c5bd228c18c1f99afa155b"}, ] -shellingham = [ - {file = "shellingham-1.3.2-py2.py3-none-any.whl", hash = "sha256:7f6206ae169dc1a03af8a138681b3f962ae61cc93ade84d0585cca3aaf770044"}, - {file = "shellingham-1.3.2.tar.gz", hash = "sha256:576c1982bea0ba82fb46c36feb951319d7f42214a82634233f58b40d858a751e"}, -] six = [ {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, @@ -5383,6 +5008,10 @@ smdebug-rulesconfig = [ {file = "smdebug_rulesconfig-0.1.4-py2.py3-none-any.whl", hash = "sha256:72e69cffd2f35708dc2a758d07368c6b15ddc031bb183a2a75273ffe2c0f8319"}, {file = "smdebug_rulesconfig-0.1.4.tar.gz", hash = "sha256:8c8e14926222451b2821e7ab065830d90770e895e6f095fe7cefd235c84b5996"}, ] +sshpubkeys = [ + {file = "sshpubkeys-3.3.1-py2.py3-none-any.whl", hash = "sha256:946f76b8fe86704b0e7c56a00d80294e39bc2305999844f079a217885060b1ac"}, + {file = "sshpubkeys-3.3.1.tar.gz", hash = "sha256:3020ed4f8c846849299370fbe98ff4157b0ccc1accec105e07cfa9ae4bb55064"}, +] stepfunctions = [ {file = "stepfunctions-1.1.2.tar.gz", hash = "sha256:bc5991927dc2cdb1b25b4af58c397567b0690645a6182d00b585a1608d47f9c4"}, ] @@ -5390,9 +5019,37 @@ toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = 
"sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] -tomlkit = [ - {file = "tomlkit-0.7.0-py2.py3-none-any.whl", hash = "sha256:6babbd33b17d5c9691896b0e68159215a9387ebfa938aa3ac42f4a4beeb2b831"}, - {file = "tomlkit-0.7.0.tar.gz", hash = "sha256:ac57f29693fab3e309ea789252fcce3061e19110085aa31af5446ca749325618"}, +typed-ast = [ + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, + {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = 
"sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, + {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, + {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, + {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, + {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, + {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = 
"sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, + {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, + {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, + {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, ] typer = [ {file = "typer-0.3.2-py3-none-any.whl", hash = "sha256:ba58b920ce851b12a2d790143009fa00ac1d05b3ff3257061ff69dbdfc3d161b"}, @@ -5426,49 +5083,6 @@ xmltodict = [ {file = "xmltodict-0.12.0-py2.py3-none-any.whl", hash = "sha256:8bbcb45cc982f48b2ca8fe7e7827c5d792f217ecf1792626f808bf41c3b86051"}, {file = "xmltodict-0.12.0.tar.gz", hash = "sha256:50d8c638ed7ecb88d90561beedbf720c9b4e851a9fa6c47ebd64e99d166d8a21"}, ] -yarl = [ - {file = "yarl-1.6.3-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_s390x.whl", hash = 
"sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76"}, - {file = "yarl-1.6.3-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366"}, - {file = "yarl-1.6.3-cp36-cp36m-win32.whl", hash = "sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721"}, - {file = "yarl-1.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643"}, - {file = "yarl-1.6.3-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f"}, - {file = "yarl-1.6.3-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970"}, - {file = "yarl-1.6.3-cp37-cp37m-win32.whl", hash = "sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e"}, - {file = "yarl-1.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50"}, - {file = "yarl-1.6.3-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux1_i686.whl", hash = 
"sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2"}, - {file = "yarl-1.6.3-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2"}, - {file = "yarl-1.6.3-cp38-cp38-win32.whl", hash = "sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896"}, - {file = "yarl-1.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a"}, - {file = "yarl-1.6.3-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0"}, - {file = "yarl-1.6.3-cp39-cp39-manylinux2014_x86_64.whl", hash = 
"sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4"}, - {file = "yarl-1.6.3-cp39-cp39-win32.whl", hash = "sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424"}, - {file = "yarl-1.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6"}, - {file = "yarl-1.6.3.tar.gz", hash = "sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10"}, -] -yaspin = [ - {file = "yaspin-1.3.0-py2.py3-none-any.whl", hash = "sha256:3b84b4c506271b638e6b09b9cb3ffe9a5c2aef0bb19ce9128cdd9be936c8d8ba"}, - {file = "yaspin-1.3.0.tar.gz", hash = "sha256:cc37d35cc7f796dada6c37430b49e471ffa05d0686e6f8de36f83978b732df54"}, -] zipp = [ {file = "zipp-3.4.0-py3-none-any.whl", hash = "sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108"}, {file = "zipp-3.4.0.tar.gz", hash = "sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb"}, diff --git a/pyproject.toml b/pyproject.toml index 23374f7..65bc2b4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ homepage = "https://github.com/vincentclaes/datajob" keywords = ["pipelines", "machine learning", "data pipelines", "data science", "data engineering"] [tool.poetry.dependencies] -python = "^3.6.1" +python = "^3.6.2" stepfunctions = "^1.1.2" contextvars = "^2.4" typer = "^0.3.2" @@ -26,9 +26,9 @@ rich = "^9.13.0" [tool.poetry.dev-dependencies] moto = "^1.3.16" -dephell = "^0.8.3" pre-commit = "^2.9.3" pytest = "^6.2.1" +black = "^21.6b0" [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/setup.py b/setup.py deleted file mode 100644 index 33d34fb..0000000 --- a/setup.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -from setuptools import setup - -packages = \ -['datajob', 'datajob.glue', 'datajob.package', 'datajob.stepfunctions'] - -package_data = \ -{'': ['*']} - -install_requires = \ -['aws-cdk.aws-glue>=1.87.1,<2.0.0', - 'aws-cdk.aws-s3-deployment>=1.87.1,<2.0.0', - 
'aws-cdk.cloudformation-include>=1.87.1,<2.0.0', - 'aws-cdk.core>=1.87.1,<2.0.0', - 'aws-empty-bucket>=2.4.0,<3.0.0', - 'contextvars>=2.4,<3.0', - 'dephell>=0.8.3,<0.9.0', - 'rich>=9.13.0,<10.0.0', - 'stepfunctions>=1.1.2,<2.0.0', - 'typer>=0.3.2,<0.4.0'] - -entry_points = \ -{'console_scripts': ['datajob = datajob.datajob:run']} - -setup_kwargs = { - 'name': 'datajob', - 'version': '0.7.0', - 'description': 'Build and deploy a serverless data pipeline with no effort on AWS.', - 'long_description': '![logo](./assets/logo.png)\n\n
\n Build and deploy a serverless data pipeline on AWS with no effort.
\n Our goal is to let developers think about the business logic, datajob does the rest...\n
\n
\n
\n
\n\n\n- We support creating and deploying code to python shell / pyspark Glue jobs.\n- Orchestrate the glue jobs using stepfunctions as simple as `task1 >> [task2,task3] >> task4`\n- Let us [know](https://github.com/vincentclaes/datajob/discussions) what you want to see next.\n\n> Dependencies are [AWS CDK](https://github.com/aws/aws-cdk) and [Step Functions SDK for data science](https://github.com/aws/aws-step-functions-data-science-sdk-python)
\n\n# Installation\n\n Datajob can be installed using pip.
\n Beware that we depend on [aws cdk cli](https://github.com/aws/aws-cdk)!\n\n pip install datajob\n npm install -g aws-cdk@1.98.0 # latest version of datajob depends this version\n\n# Quickstart\n\nWe have a simple data pipeline composed of [2 glue jobs](./examples/data_pipeline_with_packaged_project/glue_jobs/) orchestrated sequentially using step functions.\n\n```python\nimport pathlib\nfrom aws_cdk import core\n\nfrom datajob.datajob_stack import DataJobStack\nfrom datajob.glue.glue_job import GlueJob\nfrom datajob.stepfunctions.stepfunctions_workflow import StepfunctionsWorkflow\n\n\ncurrent_dir = pathlib.Path(__file__).parent.absolute()\n\napp = core.App()\n\n\nwith DataJobStack(scope=app, id="data-pipeline-pkg", project_root=current_dir) as datajob_stack:\n\n task1 = GlueJob(\n datajob_stack=datajob_stack, name="task1", job_path="glue_jobs/task1.py"\n )\n\n task2 = GlueJob(\n datajob_stack=datajob_stack, name="task2", job_path="glue_jobs/task2.py"\n )\n\n with StepfunctionsWorkflow(datajob_stack=datajob_stack, name="workflow") as step_functions_workflow:\n task1 >> task2\n\napp.synth()\n\n```\n\nWe add the above code in a file called `datajob_stack.py` in the [root of the project](./examples/data_pipeline_with_packaged_project/).\n\n\n### Configure CDK\nFollow the steps [here](https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-quickstart.html#cli-configure-quickstart-config) to configure your credentials.\n\n```shell script\nexport AWS_PROFILE=default\n# use the aws cli to get your account number\nexport AWS_ACCOUNT=$(aws sts get-caller-identity --query Account --output text --profile $AWS_PROFILE)\nexport AWS_DEFAULT_REGION=us-east-2\n\ncdk bootstrap aws://$AWS_ACCOUNT/$AWS_DEFAULT_REGION\n```\n\n### Deploy\n\n```shell\nexport STAGE=$AWS_ACCOUNT\ncd examples/data_pipeline_with_packaged_project\ndatajob deploy --config datajob_stack.py --stage $STAGE --package setuppy\n```\nDatajob will create s3 buckets based on the `stage` variable.\nThe stage 
variable will typically be something like "dev", "stg", "prd", ...\nbut since S3 buckets need to be globally unique, for this example we will use our `$AWS_ACCOUNT` for the `--stage` parameter.\n\n
\nuse cdk cli\n\n```shell script\ncd examples/data_pipeline_with_packaged_project\npython setup.py bdist_wheel\ncdk deploy --app "python datajob_stack.py" -c stage=$STAGE\n```\n
\n\n### Run\n\n```shell script\ndatajob execute --state-machine data-pipeline-pkg-$STAGE-workflow\n```\nThe step function state machine name is constructed as `--`.\nThe terminal will show a link to the step functions page to follow up on your pipeline run.\n\n### Destroy\n\n```shell script\ndatajob destroy --config datajob_stack.py --stage $STAGE\n```\n\n
\nuse cdk cli\n\n```shell script\ncdk destroy --app "python datajob_stack.py" -c stage=$STAGE\n```\n
\n\n> Note: you can use any cdk arguments in the datajob cli\n\n# Functionality\n\n
\nUsing datajob\'s S3 data bucket\n\nDynamically reference the `datajob_stack` data bucket name to the arguments of your GlueJob by calling\n`datajob_stack.context.data_bucket_name`.\n\n```python\nimport pathlib\n\nfrom aws_cdk import core\nfrom datajob.datajob_stack import DataJobStack\nfrom datajob.glue.glue_job import GlueJob\nfrom datajob.stepfunctions.stepfunctions_workflow import StepfunctionsWorkflow\n\ncurrent_dir = str(pathlib.Path(__file__).parent.absolute())\n\napp = core.App()\n\nwith DataJobStack(\n scope=app, id="datajob-python-pyspark", project_root=current_dir\n) as datajob_stack:\n\n pyspark_job = GlueJob(\n datajob_stack=datajob_stack,\n name="pyspark-job",\n job_path="glue_job/glue_pyspark_example.py",\n job_type="glueetl",\n glue_version="2.0", # we only support glue 2.0\n python_version="3",\n worker_type="Standard", # options are Standard / G.1X / G.2X\n number_of_workers=1,\n arguments={\n "--source": f"s3://{datajob_stack.context.data_bucket_name}/raw/iris_dataset.csv",\n "--destination": f"s3://{datajob_stack.context.data_bucket_name}/target/pyspark_job/iris_dataset.parquet",\n },\n )\n\n with StepfunctionsWorkflow(datajob_stack=datajob_stack, name="workflow") as sfn:\n pyspark_job >> ...\n\n```\n\ndeploy to stage `my-stage`:\n\n```shell\ndatajob deploy --config datajob_stack.py --stage my-stage --package setuppy\n```\n\n`datajob_stack.context.data_bucket_name` will evaluate to `datajob-python-pyspark-my-stage`\n\nyou can find this example [here](./examples/data_pipeline_pyspark/glue_job/glue_pyspark_example.py)\n\n
\n\n
\nDeploy files to deployment bucket\n\nSpecify the path to the folder we would like to include in the deployment bucket.\n\n```python\n\nfrom aws_cdk import core\nfrom datajob.datajob_stack import DataJobStack\n\napp = core.App()\n\nwith DataJobStack(\n scope=app, id="some-stack-name", include_folder="path/to/folder/"\n) as datajob_stack:\n\n ...\n\n```\n\n
\n\n
\nPackage project\n\nPackage your project using [poetry](https://python-poetry.org/)\n\n```shell\ndatajob deploy --config datajob_stack.py --package poetry\n```\nPackage your project using [setup.py](./examples/data_pipeline_with_packaged_project)\n```shell\ndatajob deploy --config datajob_stack.py --package setuppy\n```\n
\n\n
\nUsing Pyspark\n\n```python\nimport pathlib\n\nfrom aws_cdk import core\nfrom datajob.datajob_stack import DataJobStack\nfrom datajob.glue.glue_job import GlueJob\nfrom datajob.stepfunctions.stepfunctions_workflow import StepfunctionsWorkflow\n\ncurrent_dir = str(pathlib.Path(__file__).parent.absolute())\n\napp = core.App()\n\nwith DataJobStack(\n scope=app, id="datajob-python-pyspark", project_root=current_dir\n) as datajob_stack:\n\n pyspark_job = GlueJob(\n datajob_stack=datajob_stack,\n name="pyspark-job",\n job_path="glue_job/glue_pyspark_example.py",\n job_type="glueetl",\n glue_version="2.0", # we only support glue 2.0\n python_version="3",\n worker_type="Standard", # options are Standard / G.1X / G.2X\n number_of_workers=1,\n arguments={\n "--source": f"s3://{datajob_stack.context.data_bucket_name}/raw/iris_dataset.csv",\n "--destination": f"s3://{datajob_stack.context.data_bucket_name}/target/pyspark_job/iris_dataset.parquet",\n },\n )\n```\nA full example can be found in [examples/data_pipeline_pyspark](./examples/data_pipeline_pyspark).\n
\n\n
\nOrchestrate stepfunctions tasks in parallel\n\n```python\n# task1 and task2 are orchestrated in parallel.\n# task3 will only start when both task1 and task2 have succeeded.\n[task1, task2] >> task3\n```\n\n
\n\n
\nOrchestrate 1 stepfunction task\n\nUse the [Ellipsis](https://docs.python.org/dev/library/constants.html#Ellipsis) object to be able to orchestrate 1 job via step functions.\n\n```python\nsome_task >> ...\n```\n\n
\n\n\n# Datajob in depth\n\nThe `datajob_stack` is the instance that will result in a cloudformation stack.\nThe path in `project_root` helps `datajob_stack` locate the root of the project where\nthe setup.py/poetry pyproject.toml file can be found, as well as the `dist/` folder with the wheel of your project .\n\n```python\nimport pathlib\nfrom aws_cdk import core\n\nfrom datajob.datajob_stack import DataJobStack\n\ncurrent_dir = pathlib.Path(__file__).parent.absolute()\napp = core.App()\n\nwith DataJobStack(\n scope=app, id="data-pipeline-pkg", project_root=current_dir\n) as datajob_stack:\n\n ...\n```\n\nWhen __entering the contextmanager__ of DataJobStack:\n\nA [DataJobContext](./datajob/datajob_stack.py#L48) is initialized\nto deploy and run a data pipeline on AWS.\nThe following resources are created:\n1) "data bucket"\n - an S3 bucket that you can use to dump ingested data, dump intermediate results and the final output.\n - you can access the data bucket as a [Bucket](https://docs.aws.amazon.com/cdk/api/latest/python/aws_cdk.aws_s3/Bucket.html) object via ```datajob_stack.context.data_bucket```\n - you can access the data bucket name via ```datajob_stack.context.data_bucket_name```\n2) "deployment bucket"\n - an s3 bucket to deploy code, artifacts, scripts, config, files, ...\n - you can access the deployment bucket as a [Bucket](https://docs.aws.amazon.com/cdk/api/latest/python/aws_cdk.aws_s3/Bucket.html) object via ```datajob_stack.context.deployment_bucket```\n - you can access the deployment bucket name via ```datajob_stack.context.deployment_bucket_name```\n\nwhen __exiting the context manager__ all the resources of our DataJobStack object are created.\n\n
\nWe can write the above example more explicitly...\n\n```python\nimport pathlib\nfrom aws_cdk import core\n\nfrom datajob.datajob_stack import DataJobStack\nfrom datajob.glue.glue_job import GlueJob\nfrom datajob.stepfunctions.stepfunctions_workflow import StepfunctionsWorkflow\n\napp = core.App()\n\ncurrent_dir = pathlib.Path(__file__).parent.absolute()\n\napp = core.App()\n\ndatajob_stack = DataJobStack(scope=app, id="data-pipeline-pkg", project_root=current_dir)\ndatajob_stack.init_datajob_context()\n\ntask1 = GlueJob(datajob_stack=datajob_stack, name="task1", job_path="glue_jobs/task1.py")\ntask2 = GlueJob(datajob_stack=datajob_stack, name="task2", job_path="glue_jobs/task2.py")\n\nwith StepfunctionsWorkflow(datajob_stack=datajob_stack, name="workflow") as step_functions_workflow:\n task1 >> task2\n\ndatajob_stack.create_resources()\napp.synth()\n```\n
\n\n# Ideas\n\nAny suggestions can be shared by starting a [discussion](https://github.com/vincentclaes/datajob/discussions)\n\nThese are the ideas, we find interesting to implement;\n\n- add a time based trigger to the step functions workflow.\n- add an s3 event trigger to the step functions workflow.\n- add a lambda that copies data from one s3 location to another.\n- add an sns that notifies in case of any failure (slack/email)\n- version your data pipeline.\n- cli command to view the logs / glue jobs / s3 bucket\n- implement sagemaker services\n - processing jobs\n - hyperparameter tuning jobs\n - training jobs\n- implement lambda\n- implement ECS Fargate\n- create a serverless UI that follows up on the different pipelines deployed on possibly different AWS accounts using Datajob\n\n> [Feedback](https://github.com/vincentclaes/datajob/discussions) is much appreciated!\n', - 'author': 'Vincent Claes', - 'author_email': 'vincent.v.claes@gmail.com', - 'maintainer': None, - 'maintainer_email': None, - 'url': 'https://github.com/vincentclaes/datajob', - 'packages': packages, - 'package_data': package_data, - 'install_requires': install_requires, - 'entry_points': entry_points, - 'python_requires': '>=3.6.1,<4.0.0', -} - - -setup(**setup_kwargs) - From b3619bfcab82f7be22a64305eed37b17f7038fed Mon Sep 17 00:00:00 2001 From: vincent Date: Sat, 19 Jun 2021 08:44:37 +0200 Subject: [PATCH 4/6] update readme --- .gitignore | 3 ++- README.md | 18 +++++++++++++++++- .../test_stepfunctions_workflow.py | 2 +- pyproject.toml | 1 - 4 files changed, 20 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index 9dc92fb..9443886 100644 --- a/.gitignore +++ b/.gitignore @@ -129,4 +129,5 @@ dmypy.json .pyre/ .idea/ datajob_tests/build/ -cdk.out/ \ No newline at end of file +cdk.out/ +.vscode/ diff --git a/README.md b/README.md index 58027d4..e27718c 100644 --- a/README.md +++ b/README.md @@ -269,6 +269,23 @@ some_task >> ... +
+Notify in case of error/success. + +Provide the parameter `notification` in the constructor of a StepfunctionsWorkflow object. +This will create an SNS Topic which will be triggered in case of failure or success. +The email will subscribe to the topic and receive the notification in its inbox. + +```python + with StepfunctionsWorkflow(datajob_stack=datajob_stack, + name="workflow", + notification="email@domain.com") as sfn: + task1 >> task2 +``` + +You can provide 1 email or a list of emails `["email1@domain.com", "email2@domain.com"]`. + +
# Datajob in depth @@ -348,7 +365,6 @@ These are the ideas, we find interesting to implement; - add a time based trigger to the step functions workflow. - add an s3 event trigger to the step functions workflow. - add a lambda that copies data from one s3 location to another. -- add an sns that notifies in case of any failure (slack/email) - version your data pipeline. - cli command to view the logs / glue jobs / s3 bucket - implement sagemaker services diff --git a/datajob_tests/stepfunctions/test_stepfunctions_workflow.py b/datajob_tests/stepfunctions/test_stepfunctions_workflow.py index 8f26a77..6fa7cb7 100644 --- a/datajob_tests/stepfunctions/test_stepfunctions_workflow.py +++ b/datajob_tests/stepfunctions/test_stepfunctions_workflow.py @@ -125,7 +125,7 @@ def test_create_workflow_with_notification_successfully(self): with io.StringIO() as f: f.write(a_step_functions_workflow.workflow.get_cloudformation_template()) f.seek(0) - cf_template = yaml.load(f) + cf_template = yaml.load(f, Loader=yaml.FullLoader) sfn_workflow = json.loads( cf_template.get("Resources") diff --git a/pyproject.toml b/pyproject.toml index 65bc2b4..f18fd62 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,6 @@ rich = "^9.13.0" moto = "^1.3.16" pre-commit = "^2.9.3" pytest = "^6.2.1" -black = "^21.6b0" [build-system] requires = ["poetry-core>=1.0.0"] From 0eaedaa8642238fdbb63198b582cff5948f9349d Mon Sep 17 00:00:00 2001 From: vincent Date: Sat, 19 Jun 2021 08:46:12 +0200 Subject: [PATCH 5/6] update readme --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e27718c..c736a9f 100644 --- a/README.md +++ b/README.md @@ -272,7 +272,7 @@ some_task >> ...
Notify in case of error/success. -Provide the parameter `notification` in the constructor of a StepfunctionsWorkflow object. +Provide the parameter `notification` in the constructor of a `StepfunctionsWorkflow` object. This will create an SNS Topic which will be triggered in case of failure or success. The email will subscribe to the topic and receive the notification in its inbox. From 3b03015c6ad7df725f4f2a6ca65243b765ba1e3e Mon Sep 17 00:00:00 2001 From: vincent Date: Sat, 19 Jun 2021 08:48:12 +0200 Subject: [PATCH 6/6] update readme --- README.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index c736a9f..0b12197 100644 --- a/README.md +++ b/README.md @@ -277,10 +277,10 @@ This will create an SNS Topic which will be triggered in case of failure or succ The email will subscribe to the topic and receive the notification in its inbox. ```python - with StepfunctionsWorkflow(datajob_stack=datajob_stack, - name="workflow", - notification="email@domain.com") as sfn: - task1 >> task2 +with StepfunctionsWorkflow(datajob_stack=datajob_stack, + name="workflow", + notification="email@domain.com") as sfn: + task1 >> task2 ``` You can provide 1 email or a list of emails `["email1@domain.com", "email2@domain.com"]`.