From 655c583ee01d547fdf29e2e1ed3a674f5bcba6ba Mon Sep 17 00:00:00 2001 From: Devis Lucato Date: Thu, 16 Mar 2023 19:54:34 -0700 Subject: [PATCH] Initial commit --- .gitattributes | 2 +- docs/PLANNER.md | 2 +- python/.conf/.pre-commit-config.yaml | 22 + python/.conf/flake8.cfg | 3 + python/.editorconfig | 29 + python/.vscode/settings.json | 21 + python/DEV_SETUP.md | 87 + python/FEATURE_PARITY.md | 199 ++ python/Makefile | 54 + python/README.md | 72 + python/poetry.lock | 1785 +++++++++++++++++ python/pyproject.toml | 25 + python/requirements.txt | 2 + python/semantic_kernel/__init__.py | 42 + python/semantic_kernel/ai/ai_exception.py | 60 + .../ai/complete_request_settings.py | 39 + .../ai/embeddings/embedding_generator_base.py | 12 + .../ai/embeddings/embedding_index_base.py | 20 + .../open_ai/services/azure_open_ai_config.py | 54 + .../open_ai/services/azure_text_completion.py | 5 + .../ai/open_ai/services/open_ai_config.py | 41 + .../services/open_ai_text_completion.py | 113 ++ .../services/open_ai_text_embedding.py | 66 + .../ai/text_completion_client_base.py | 19 + .../configuration/backend_config.py | 35 + .../configuration/backend_types.py | 9 + .../configuration/kernel_config.py | 274 +++ .../semantic_kernel/core_skills/__init__.py | 5 + .../core_skills/text_memory_skill.py | 132 ++ .../diagnostics/sk_exception.py | 29 + .../diagnostics/validation_exception.py | 50 + python/semantic_kernel/diagnostics/verify.py | 105 + python/semantic_kernel/kernel.py | 281 +++ python/semantic_kernel/kernel_base.py | 88 + python/semantic_kernel/kernel_builder.py | 90 + python/semantic_kernel/kernel_exception.py | 58 + .../kernel_extensions/__init__.py | 54 + .../import_semantic_skill_from_directory.py | 60 + .../inline_function_definitions.py | 63 + .../kernel_extensions/memory_configuration.py | 51 + python/semantic_kernel/memory/__init__.py | 4 + .../memory/memory_query_result.py | 43 + .../semantic_kernel/memory/memory_record.py | 63 + .../memory/memory_store_base.py | 
10 + python/semantic_kernel/memory/null_memory.py | 43 + .../memory/semantic_text_memory.py | 73 + .../memory/semantic_text_memory_base.py | 54 + .../memory/storage/data_entry.py | 36 + .../memory/storage/data_store_base.py | 36 + .../memory/storage/volatile_data_store.py | 62 + .../memory/volatile_memory_store.py | 52 + .../orchestration/context_variables.py | 70 + .../orchestration/delegate_handlers.py | 150 ++ .../orchestration/delegate_inference.py | 246 +++ .../orchestration/delegate_types.py | 25 + .../orchestration/sk_context.py | 237 +++ .../orchestration/sk_function.py | 288 +++ .../orchestration/sk_function_base.py | 188 ++ .../reliability/pass_through_without_retry.py | 30 + .../reliability/retry_mechanism.py | 24 + .../semantic_functions/prompt_template.py | 72 + .../prompt_template_base.py | 18 + .../prompt_template_config.py | 93 + .../semantic_function_config.py | 16 + .../skill_definition/__init__.py | 16 + .../skill_definition/function_view.py | 82 + .../skill_definition/functions_view.py | 46 + .../skill_definition/parameter_view.py | 41 + .../read_only_skill_collection.py | 51 + .../read_only_skill_collection_base.py | 44 + ...sk_function_context_parameter_decorator.py | 31 + .../skill_definition/sk_function_decorator.py | 17 + .../sk_function_input_decorator.py | 17 + .../sk_function_name_decorator.py | 16 + .../skill_definition/skill_collection.py | 139 ++ .../skill_definition/skill_collection_base.py | 30 + .../template_engine/blocks/block.py | 39 + .../template_engine/blocks/block_types.py | 10 + .../template_engine/blocks/code_block.py | 154 ++ .../template_engine/blocks/text_block.py | 19 + .../template_engine/blocks/var_block.py | 76 + .../template_engine/prompt_template_engine.py | 174 ++ .../prompt_template_engine_base.py | 36 + .../template_engine/template_exception.py | 42 + python/semantic_kernel/utils/null_logger.py | 24 + python/semantic_kernel/utils/settings.py | 62 + .../semantic_kernel/utils/static_property.py | 8 + 
python/tests/__init__.py | 1 + python/tests/basics.py | 49 + python/tests/chat.py | 68 + python/tests/memory.py | 153 ++ .../python/1-basic-loading-the-kernel.ipynb | 175 ++ .../python/2-running-prompts-from-file.ipynb | 176 ++ .../python/3-semantic-function-inline.ipynb | 307 +++ .../python/4-context-variables-chat.ipynb | 252 +++ .../python/5-using-the-planner.ipynb | 260 +++ .../python/6-memory-and-embeddings.ipynb | 435 ++++ samples/notebooks/python/init.py | 62 + 98 files changed, 9201 insertions(+), 2 deletions(-) create mode 100644 python/.conf/.pre-commit-config.yaml create mode 100644 python/.conf/flake8.cfg create mode 100644 python/.editorconfig create mode 100644 python/.vscode/settings.json create mode 100644 python/DEV_SETUP.md create mode 100644 python/FEATURE_PARITY.md create mode 100644 python/Makefile create mode 100644 python/README.md create mode 100644 python/poetry.lock create mode 100644 python/pyproject.toml create mode 100644 python/requirements.txt create mode 100644 python/semantic_kernel/__init__.py create mode 100644 python/semantic_kernel/ai/ai_exception.py create mode 100644 python/semantic_kernel/ai/complete_request_settings.py create mode 100644 python/semantic_kernel/ai/embeddings/embedding_generator_base.py create mode 100644 python/semantic_kernel/ai/embeddings/embedding_index_base.py create mode 100644 python/semantic_kernel/ai/open_ai/services/azure_open_ai_config.py create mode 100644 python/semantic_kernel/ai/open_ai/services/azure_text_completion.py create mode 100644 python/semantic_kernel/ai/open_ai/services/open_ai_config.py create mode 100644 python/semantic_kernel/ai/open_ai/services/open_ai_text_completion.py create mode 100644 python/semantic_kernel/ai/open_ai/services/open_ai_text_embedding.py create mode 100644 python/semantic_kernel/ai/text_completion_client_base.py create mode 100644 python/semantic_kernel/configuration/backend_config.py create mode 100644 python/semantic_kernel/configuration/backend_types.py 
create mode 100644 python/semantic_kernel/configuration/kernel_config.py create mode 100644 python/semantic_kernel/core_skills/__init__.py create mode 100644 python/semantic_kernel/core_skills/text_memory_skill.py create mode 100644 python/semantic_kernel/diagnostics/sk_exception.py create mode 100644 python/semantic_kernel/diagnostics/validation_exception.py create mode 100644 python/semantic_kernel/diagnostics/verify.py create mode 100644 python/semantic_kernel/kernel.py create mode 100644 python/semantic_kernel/kernel_base.py create mode 100644 python/semantic_kernel/kernel_builder.py create mode 100644 python/semantic_kernel/kernel_exception.py create mode 100644 python/semantic_kernel/kernel_extensions/__init__.py create mode 100644 python/semantic_kernel/kernel_extensions/import_semantic_skill_from_directory.py create mode 100644 python/semantic_kernel/kernel_extensions/inline_function_definitions.py create mode 100644 python/semantic_kernel/kernel_extensions/memory_configuration.py create mode 100644 python/semantic_kernel/memory/__init__.py create mode 100644 python/semantic_kernel/memory/memory_query_result.py create mode 100644 python/semantic_kernel/memory/memory_record.py create mode 100644 python/semantic_kernel/memory/memory_store_base.py create mode 100644 python/semantic_kernel/memory/null_memory.py create mode 100644 python/semantic_kernel/memory/semantic_text_memory.py create mode 100644 python/semantic_kernel/memory/semantic_text_memory_base.py create mode 100644 python/semantic_kernel/memory/storage/data_entry.py create mode 100644 python/semantic_kernel/memory/storage/data_store_base.py create mode 100644 python/semantic_kernel/memory/storage/volatile_data_store.py create mode 100644 python/semantic_kernel/memory/volatile_memory_store.py create mode 100644 python/semantic_kernel/orchestration/context_variables.py create mode 100644 python/semantic_kernel/orchestration/delegate_handlers.py create mode 100644 
python/semantic_kernel/orchestration/delegate_inference.py create mode 100644 python/semantic_kernel/orchestration/delegate_types.py create mode 100644 python/semantic_kernel/orchestration/sk_context.py create mode 100644 python/semantic_kernel/orchestration/sk_function.py create mode 100644 python/semantic_kernel/orchestration/sk_function_base.py create mode 100644 python/semantic_kernel/reliability/pass_through_without_retry.py create mode 100644 python/semantic_kernel/reliability/retry_mechanism.py create mode 100644 python/semantic_kernel/semantic_functions/prompt_template.py create mode 100644 python/semantic_kernel/semantic_functions/prompt_template_base.py create mode 100644 python/semantic_kernel/semantic_functions/prompt_template_config.py create mode 100644 python/semantic_kernel/semantic_functions/semantic_function_config.py create mode 100644 python/semantic_kernel/skill_definition/__init__.py create mode 100644 python/semantic_kernel/skill_definition/function_view.py create mode 100644 python/semantic_kernel/skill_definition/functions_view.py create mode 100644 python/semantic_kernel/skill_definition/parameter_view.py create mode 100644 python/semantic_kernel/skill_definition/read_only_skill_collection.py create mode 100644 python/semantic_kernel/skill_definition/read_only_skill_collection_base.py create mode 100644 python/semantic_kernel/skill_definition/sk_function_context_parameter_decorator.py create mode 100644 python/semantic_kernel/skill_definition/sk_function_decorator.py create mode 100644 python/semantic_kernel/skill_definition/sk_function_input_decorator.py create mode 100644 python/semantic_kernel/skill_definition/sk_function_name_decorator.py create mode 100644 python/semantic_kernel/skill_definition/skill_collection.py create mode 100644 python/semantic_kernel/skill_definition/skill_collection_base.py create mode 100644 python/semantic_kernel/template_engine/blocks/block.py create mode 100644 
python/semantic_kernel/template_engine/blocks/block_types.py create mode 100644 python/semantic_kernel/template_engine/blocks/code_block.py create mode 100644 python/semantic_kernel/template_engine/blocks/text_block.py create mode 100644 python/semantic_kernel/template_engine/blocks/var_block.py create mode 100644 python/semantic_kernel/template_engine/prompt_template_engine.py create mode 100644 python/semantic_kernel/template_engine/prompt_template_engine_base.py create mode 100644 python/semantic_kernel/template_engine/template_exception.py create mode 100644 python/semantic_kernel/utils/null_logger.py create mode 100644 python/semantic_kernel/utils/settings.py create mode 100644 python/semantic_kernel/utils/static_property.py create mode 100644 python/tests/__init__.py create mode 100644 python/tests/basics.py create mode 100644 python/tests/chat.py create mode 100644 python/tests/memory.py create mode 100644 samples/notebooks/python/1-basic-loading-the-kernel.ipynb create mode 100644 samples/notebooks/python/2-running-prompts-from-file.ipynb create mode 100644 samples/notebooks/python/3-semantic-function-inline.ipynb create mode 100644 samples/notebooks/python/4-context-variables-chat.ipynb create mode 100644 samples/notebooks/python/5-using-the-planner.ipynb create mode 100644 samples/notebooks/python/6-memory-and-embeddings.ipynb create mode 100644 samples/notebooks/python/init.py diff --git a/.gitattributes b/.gitattributes index 367c088539db..e79a913b6c46 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,5 +1,5 @@ # Auto-detect text files, ensure they use LF. -* text=auto eol=lf working-tree-encoding=UTF-8 +* text=auto eol=lf # Bash scripts *.sh text eol=lf diff --git a/docs/PLANNER.md b/docs/PLANNER.md index 6a4ccecf417f..a2e2fc82daed 100644 --- a/docs/PLANNER.md +++ b/docs/PLANNER.md @@ -38,4 +38,4 @@ like "I want a job promotion." The planner will operate within the skills it has available. 
In the event that a desired skill does not exist, the planner can suggest you to create the skill. Or, depending upon the level of complexity the kernel can help you write the missing -skill. \ No newline at end of file +skill. diff --git a/python/.conf/.pre-commit-config.yaml b/python/.conf/.pre-commit-config.yaml new file mode 100644 index 000000000000..196f8b9e75b0 --- /dev/null +++ b/python/.conf/.pre-commit-config.yaml @@ -0,0 +1,22 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: check-toml + - id: check-yaml + - id: end-of-file-fixer + - id: mixed-line-ending + - repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black + - repo: https://github.com/PyCQA/isort + rev: 5.12.0 + hooks: + - id: isort + args: ["--profile", "black"] + - repo: https://github.com/pycqa/flake8 + rev: 6.0.0 + hooks: + - id: flake8 + args: ["--config=python/.conf/flake8.cfg"] diff --git a/python/.conf/flake8.cfg b/python/.conf/flake8.cfg new file mode 100644 index 000000000000..8dd399ab55bc --- /dev/null +++ b/python/.conf/flake8.cfg @@ -0,0 +1,3 @@ +[flake8] +max-line-length = 88 +extend-ignore = E203 diff --git a/python/.editorconfig b/python/.editorconfig new file mode 100644 index 000000000000..33e7014a0183 --- /dev/null +++ b/python/.editorconfig @@ -0,0 +1,29 @@ +# To learn more about .editorconfig see https://aka.ms/editorconfigdocs + +# All files +[*] +indent_style = space +end_of_line = lf + +# Docs +[*.md] +insert_final_newline = true +trim_trailing_whitespace = true + +# Config/data +[*.json] +indent_size = 4 +insert_final_newline = false +trim_trailing_whitespace = true + +# Config/data +[*.yaml] +indent_size = 4 +insert_final_newline = true +trim_trailing_whitespace = true + +# Code +[*.py] +indent_size = 4 +insert_final_newline = true +trim_trailing_whitespace = true diff --git a/python/.vscode/settings.json b/python/.vscode/settings.json new file mode 100644 index 000000000000..17d438492690 --- /dev/null +++ 
b/python/.vscode/settings.json @@ -0,0 +1,21 @@ +{ + "python.analysis.extraPaths": [ + "./src" + ], + "explorer.compactFolders": false, + "prettier.enable": true, + "editor.formatOnType": true, + "editor.formatOnSave": true, + "editor.formatOnPaste": true, + "python.formatting.provider": "autopep8", + "python.formatting.autopep8Args": [ + "--max-line-length=160" + ], + "notebook.output.textLineLimit": 500, + "cSpell.words": [ + "aeiou", + "nopep", + "OPENAI", + "skfunction" + ], +} diff --git a/python/DEV_SETUP.md b/python/DEV_SETUP.md new file mode 100644 index 000000000000..2e8e094fd239 --- /dev/null +++ b/python/DEV_SETUP.md @@ -0,0 +1,87 @@ +# System setup + +To get started, you'll need VSCode and a local installation of Python 3.x. + +You can run: + + python3 --version ; pip3 --version ; code -v + +to verify that you have the required dependencies. + +## If you're on WSL + +Check that you've cloned the repository to `~/workspace` or a similar folder. +Avoid `/mnt/c/` and prefer using your WSL user's home directory. + +Ensure you have the WSL extension for VSCode installed (and the Python extension +for VSCode installed). + +You'll also need `pip3` installed. If you don't yet have a `python3` install in WSL, +you can run: + +```bash +sudo apt-get update && sudo apt-get install python3 python3-pip +``` + +ℹ️ **Note**: if you don't have your PATH setup to find executables installed by `pip3`, +you may need to run `~/.local/bin/poetry install` and `~/.local/bin/poetry shell` +instead. You can fix this by adding `export PATH="$HOME/.local/bin:$PATH"` to +your `~/.bashrc` and closing/re-opening the terminal.\_ + +# LLM setup + +Make sure you have an +[Open AI API Key](https://openai.com/api/) or +[Azure Open AI service key](https://learn.microsoft.com/azure/cognitive-services/openai/quickstart?pivots=rest-api) + +ℹ️ **Note**: Azure OpenAI support is work in progress, and will be available soon. 
+ +Copy those keys into a `.env` file like this: + +``` +OPENAI_API_KEY="" +OPENAI_ORG_ID="" +AZURE_OPENAI_API_KEY="" +AZURE_OPENAI_ENDPOINT="" +``` + +We suggest adding a copy of the `.env` file under these folders: + +- [python/tests](tests) +- [samples/notebooks/python](../samples/notebooks/python). + +# Quickstart with Poetry + +Poetry allows to use SK from the current repo, without worrying about paths, as +if you had SK pip package installed. SK pip package will be published after +porting all the major features and ensuring cross-compatibility with C# SDK. + +To install Poetry in your system: + + pip3 install poetry + +The following command install the project dependencies: + + poetry install + +And the following activates the project virtual environment, to make it easier +running samples in the repo and developing apps using Python SK. + + poetry shell + +To run the same checks that are run during the Azure Pipelines build, you can run: + + poetry run pre-commit run -c .conf/.pre-commit-config.yaml -a + +# VSCode Setup + +Open any of the `.py` files in the project and run the `Python: Select Interpreter` command +from the command palette. Make sure the virtual env (venv) created by `poetry` is selected. +The python you're looking for should be under `~/.cache/pypoetry/virtualenvs/semantic-kernel-.../bin/python`. + +If prompted, install `black` and `flake8` (if VSCode doesn't find those packages, +it will prompt you to install them). + +# Tests + +You should be able to run the example under the [tests](tests) folder. diff --git a/python/FEATURE_PARITY.md b/python/FEATURE_PARITY.md new file mode 100644 index 000000000000..4b458d9b19f9 --- /dev/null +++ b/python/FEATURE_PARITY.md @@ -0,0 +1,199 @@ +# Achieving Feature Parity in Python and C# + +This is a high-level overview of where things stand towards reaching feature parity with the main + [C# codebase](https://github.com/microsoft/semantic-kernel/tree/main/dotnet/src/SemanticKernel). 
+ +| | | | +|------|------| ------ +| |Python| Notes | +|`./ai/embeddings`| 🔄| Using Numpy for embedding representation. Vector operations not yet implemented | +|`./ai/openai`| 🔄 | Makes use of the OpenAI Python package. AzureOpenAI* not implemented | +|`./configuration`|✅ | Direct port. Check inline docs | +|`./core_skills`| 🔄 | `TextMemorySkill` implemented. Others not | +|`./diagnostics` | ✅ | Direct port of custom exceptions and validation helpers | +|`./kernel_extensions` | 🔄 | Extensions take kernel as first argument and are exposed via `sk.extensions.*` +|`./memory`| 🔄 | Can simplify by relying on Numpy NDArray +|`./planning`| ❌ | Not yet implemented +|`./semantic_functions/partitioning`| ❌ | Not yet implemented + + +## Status of the Port + +The port has a bulk of the Semantic Kernel C# code re-implemented, but is not yet fully complete. Major things like `tests` and `docs` are still missing. +Here is a breakdown by sub-module on the status of this port: + +### `./ai/embeddings` (Partial) + +For now, `VectorOperations` from the original kernel will be skipped. We can use +`numpy`'s `ndarray` as an efficient embedding representation. We can also use +`numpy`'s optimized vector and matrix operations to do things like cosine similarity +quickly and efficiently. + +The `IEmbeddingIndex` interface has been translated to the `EmbeddingIndexBase` abstract +class. The `IEmbeddingGenerator` interface has been translated to the +`embedding_generator_base` abstract class. + +The C# code makes use of extension methods to attach convenience methods to many interfaces +and classes. In Python we don't have that luxury. Instead, these methods are in the corresponding class definition. +(We can revisit this, but for good type hinting avoiding something fancy/dynamic works best.) + +### `./ai/openai` (Partial) + +The abstract clients (`(Azure)OpenAIClientAbstract`) have been ignored here. 
The `HttpSchema` +submodule is not needed given we have the `openai` package to do the heavy lifting (bonus: that +package will stay in-sync with OpenAI's updates, like the new ChatGPT API). + +The `./ai/openai/services` module is retained and has the same classes/structure. + +#### TODOs + +The `AzureOpenAI*` alternatives are not yet implemented. This would be a great, low difficulty +task for a new contributor to pick up. + +### `./ai` (Complete?) + +The rest of the classes at the top-level of the `./ai` module have been ported +directly. + +**NOTE:** here, we've locked ourselves into getting a _single_ completion +from the model. This isn't ideal. Getting multiple completions is sometimes a great +way to solve more challenging tasks (majority voting, re-ranking, etc.). We should look +at supporting multiple completions. + +**NOTE:** Based on `CompleteRequestSettings` no easy way to grab the `logprobs` +associated with the models completion. This would be huge for techniques like re-ranking +and also very crucial data to capture for metrics. We should think about how to +support this. (We're currently a "text in text out" library, but multiple completions +and logprobs seems to be fundamental in this space.) + +### `./configuration` (Complete?) + +Direct port, not much to do here. Probably check for good inline docs. + +### `./core_skills` (Partial) + +We've implemented the `TextMemorySkill` but are missing the following: + +- `ConversationSummarySkill` +- `FileIOSkill` +- `HttpSkill` +- `PlannerSkill` (NOTE: planner is a big sub-module we're missing) +- `TextSkill` +- `TimeSkill` + +#### TODOs + +Any of these individual core skills would be create low--medium difficulty contributions +for those looking for something to do. Ideally with good docs and corresponding tests. + +### `./diagnostics` (Complete?) + +Pretty direct port of these few custom exceptions and validation helpers. 
+ +### `./kernel_extensions` (Partial) + +This is difficult, for good type hinting there's a lot of duplication. Not having the +convenience of extension methods makes this cumbersome. Maybe, in the future, we may +want to consider some form of "plugins" for the kernel? + +For now, the kernel extensions take the kernel as the first argument and are exposed +via the `sk.extensions.*` namespace. + +### `./memory` (Partial) + +This was a complex sub-system to port. The C# code has lots of interfaces and nesting +of types and generics. In Python, we can simplify this a lot. An embedding +is an `ndarray`. There's lots of great pre-built features that come with that. The +rest of the system is a pretty direct port but the layering can be a bit confusing. +I.e. What's the real difference between storage, memory, memory record, +data entry, an embedding, a collection, etc.? + +#### TODOs + +Review of this subsystem. Lots of good testing. Maybe some kind of overview +documentation about the design. Maybe a diagram of how all these classes and interfaces +fit together? + +### `./orchestration` (Complete?) + +This was a pretty core piece and another direct port. Worth double checking. Needs good docs and tests. + +### `./planning` (TODO: nothing yet) + +Completely ignored planning for now (and, selfishly, planning isn't a priority for +SK-based experimentation). + +### `./reliability` (Complete?) + +Direct port. Nothing much going on in this sub-module. Likely could use more strategies +for retry. Also wasn't quite sure if this was integrated with the kernel/backends? +(Like are we actually using the re-try code, or is it not hit) + +#### TODOs + +Implement a real retry strategy that has backoff perhaps. Make sure this code is integrated +and actually in use. + +### `./semantic_functions` (Complete?) + +Another core piece. The different config classes start to feel cumbersome here +(func config, prompt config, backend config, kernel config, so so much config). 
+ +### `./semantic_functions/partitioning` (TODO: nothing yet) + +Skipped this sub-sub-module for now. Good task for someone to pick up! + +### `./skill_definition` (Complete?) + +Another core piece, another pretty direct port. + +**NOTE:** the attributes in C# become decorators in Python. We probably could +make it feel a bit more pythonic (instead of having multiple decorators have just +one or two). + +**NOTE:** The skill collection, read only skill collection, etc. became a bit +confusing (in terms of the relationship between everything). Would be good to +double check my work there. + +### `./template_engine` (Complete?) + +Love the prompt templates! Have tried some basic prompts, prompts w/ vars, +and prompts that call native functions. Seems to be working. + +**NOTE:** this module definitely needs some good tests. There can be see some +subtle errors sneaking into the prompt tokenization/rendering code here. + +### `./text` (TODO: nothing yet) + +Ignored this module for now. + +### `` (Partial) + +Have a working `Kernel` and a working `KernelBuilder`. The base interface +and custom exception are ported. the `Kernel` in particular +is missing some things, has some bugs, could be cleaner, etc. + +## Overall TODOs + +We are currently missing a lot of the doc comments from C#. So a good review +of the code and a sweep for missing doc comments would be great. + +We also are missing any _testing_. We should figure out how we want to test +(I think this project is auto-setup for `pytest`). + +Finally, we are missing a lot of examples. It'd be great to have Python notebooks +that show off many of the features, many of the core skills, etc. + + +## Design Choices + +We want the overall design of the kernel to be as similar as possible to C#. +We also want to minimize the number of external dependencies to make the Kernel as lightweight as possible. + +Right now, compared to C# there are two key differences: + +1. 
Use `numpy` to store embeddings and do things like vector/matrix ops +2. Use `openai` to interface with (Azure) OpenAI + +There's also a lot of more subtle differences that come with moving to Python, +things like static properties, no method overloading, no extension methods, etc. diff --git a/python/Makefile b/python/Makefile new file mode 100644 index 000000000000..8fdeec500379 --- /dev/null +++ b/python/Makefile @@ -0,0 +1,54 @@ +SHELL = bash + +.PHONY: help install recreate-env pre-commit + +help: + @echo -e "\033[1mUSAGE:\033[0m" + @echo " make [target]" + @echo "" + @echo -e "\033[1mTARGETS:\033[0m" + @echo " install - install Poetry and project dependencies" + @echo " install-pre-commit - install and configure pre-commit hooks" + @echo " pre-commit - run pre-commit hooks on all files" + @echo " recreate-env - destroy and recreate Poetry's virtualenv" + +.ONESHELL: +install: + @# Check to make sure Python is installed + @if ! command -v python3 &> /dev/null + then + echo "Python could not be found" + echo "Please install Python" + exit 1 + fi + + @# Check if Poetry is installed + @if ! 
command -v poetry &> /dev/null + then + echo "Poetry could not be found" + echo "Installing Poetry" + curl -sSL https://install.python-poetry.org | python3 - + fi + + # Install the dependencies + poetry install + +.ONESHELL: +recreate-env: + # Stop the current virtualenv if active or alternative use + # `exit` to exit from a Poetry shell session + (deactivate || exit 0) + + # Remove all the files of the current environment of the folder we are in + export POETRY_LOCATION=$$(poetry env info -p) + echo "Poetry is $${POETRY_LOCATION}" + rm -rf "$${POETRY_LOCATION}" + +pre-commit: + poetry run pre-commit run --all-files -c .conf/.pre-commit-config.yaml + +.ONESHELL: +install-pre-commit: + poetry run pre-commit install + # Edit the pre-commit config file to change the config path + sed -i 's|\.pre-commit-config\.yaml|\.conf/\.pre-commit-config\.yaml|g' .git/hooks/pre-commit diff --git a/python/README.md b/python/README.md new file mode 100644 index 000000000000..707d9f32a690 --- /dev/null +++ b/python/README.md @@ -0,0 +1,72 @@ +# Quickstart with Poetry + +## Installation + +Install the Poetry package manager and create a project virtual environment. + +```bash +# Install poetry package +pip3 install poetry +# Use poetry to install project deps +poetry install +# Use poetry to activate project venv +poetry shell +``` + +Make sure you have an +[Open AI API Key](https://openai.com/api/) or +[Azure Open AI service key](https://learn.microsoft.com/azure/cognitive-services/openai/quickstart?pivots=rest-api) + +Copy those keys into a `.env` file in this repo + +``` +OPENAI_API_KEY="" +OPENAI_ORG_ID="" +AZURE_OPENAI_API_KEY="" +AZURE_OPENAI_ENDPOINT="" +``` + +### Quickstart ⚡ + +```python +import semantic_kernel as sk + +kernel = sk.create_kernel() + +api_key, org_id = sk.openai_settings_from_dot_env() + +kernel.config.add_openai_completion_backend( + "davinci-002", "text-davinci-002", api_key, org_id +) + +sk_prompt = """ +{{$input}} + +Give me the TLDR in 5 words. 
+""" + +text_to_summarize = """ + 1) A robot may not injure a human being or, through inaction, + allow a human being to come to harm. + + 2) A robot must obey orders given it by human beings except where + such orders would conflict with the First Law. + + 3) A robot must protect its own existence as long as such protection + does not conflict with the First or Second Law. +""" + +tldr_function = sk.extensions.create_semantic_function( + kernel, + sk_prompt, + max_tokens=200, + temperature=0, + top_p=0.5, +) + +summary = await kernel.run_on_str_async(text_to_summarize, tldr_function) +output = str(summary.variables).strip() +print("Output: " + output) + +# Output: Protect humans, follow orders, survive. +``` diff --git a/python/poetry.lock b/python/poetry.lock new file mode 100644 index 000000000000..654ec17b4dae --- /dev/null +++ b/python/poetry.lock @@ -0,0 +1,1785 @@ +# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand. + +[[package]] +name = "aiohttp" +version = "3.8.4" +description = "Async http client/server framework (asyncio)" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5ce45967538fb747370308d3145aa68a074bdecb4f3a300869590f725ced69c1"}, + {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b744c33b6f14ca26b7544e8d8aadff6b765a80ad6164fb1a430bbadd593dfb1a"}, + {file = "aiohttp-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a45865451439eb320784918617ba54b7a377e3501fb70402ab84d38c2cd891b"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a86d42d7cba1cec432d47ab13b6637bee393a10f664c425ea7b305d1301ca1a3"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee3c36df21b5714d49fc4580247947aa64bcbe2939d1b77b4c8dcb8f6c9faecc"}, + {file = 
"aiohttp-3.8.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:176a64b24c0935869d5bbc4c96e82f89f643bcdf08ec947701b9dbb3c956b7dd"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c844fd628851c0bc309f3c801b3a3d58ce430b2ce5b359cd918a5a76d0b20cb5"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5393fb786a9e23e4799fec788e7e735de18052f83682ce2dfcabaf1c00c2c08e"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e4b09863aae0dc965c3ef36500d891a3ff495a2ea9ae9171e4519963c12ceefd"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:adfbc22e87365a6e564c804c58fc44ff7727deea782d175c33602737b7feadb6"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:147ae376f14b55f4f3c2b118b95be50a369b89b38a971e80a17c3fd623f280c9"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:eafb3e874816ebe2a92f5e155f17260034c8c341dad1df25672fb710627c6949"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6cc15d58053c76eacac5fa9152d7d84b8d67b3fde92709195cb984cfb3475ea"}, + {file = "aiohttp-3.8.4-cp310-cp310-win32.whl", hash = "sha256:59f029a5f6e2d679296db7bee982bb3d20c088e52a2977e3175faf31d6fb75d1"}, + {file = "aiohttp-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:fe7ba4a51f33ab275515f66b0a236bcde4fb5561498fe8f898d4e549b2e4509f"}, + {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d8ef1a630519a26d6760bc695842579cb09e373c5f227a21b67dc3eb16cfea4"}, + {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b3f2e06a512e94722886c0827bee9807c86a9f698fac6b3aee841fab49bbfb4"}, + {file = "aiohttp-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a80464982d41b1fbfe3154e440ba4904b71c1a53e9cd584098cd41efdb188ef"}, + {file = 
"aiohttp-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b631e26df63e52f7cce0cce6507b7a7f1bc9b0c501fcde69742130b32e8782f"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f43255086fe25e36fd5ed8f2ee47477408a73ef00e804cb2b5cba4bf2ac7f5e"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d347a172f866cd1d93126d9b239fcbe682acb39b48ee0873c73c933dd23bd0f"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3fec6a4cb5551721cdd70473eb009d90935b4063acc5f40905d40ecfea23e05"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80a37fe8f7c1e6ce8f2d9c411676e4bc633a8462844e38f46156d07a7d401654"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d1e6a862b76f34395a985b3cd39a0d949ca80a70b6ebdea37d3ab39ceea6698a"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cd468460eefef601ece4428d3cf4562459157c0f6523db89365202c31b6daebb"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:618c901dd3aad4ace71dfa0f5e82e88b46ef57e3239fc7027773cb6d4ed53531"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:652b1bff4f15f6287550b4670546a2947f2a4575b6c6dff7760eafb22eacbf0b"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80575ba9377c5171407a06d0196b2310b679dc752d02a1fcaa2bc20b235dbf24"}, + {file = "aiohttp-3.8.4-cp311-cp311-win32.whl", hash = "sha256:bbcf1a76cf6f6dacf2c7f4d2ebd411438c275faa1dc0c68e46eb84eebd05dd7d"}, + {file = "aiohttp-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:6e74dd54f7239fcffe07913ff8b964e28b712f09846e20de78676ce2a3dc0bfc"}, + {file = "aiohttp-3.8.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:880e15bb6dad90549b43f796b391cfffd7af373f4646784795e20d92606b7a51"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb96fa6b56bb536c42d6a4a87dfca570ff8e52de2d63cabebfd6fb67049c34b6"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a6cadebe132e90cefa77e45f2d2f1a4b2ce5c6b1bfc1656c1ddafcfe4ba8131"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f352b62b45dff37b55ddd7b9c0c8672c4dd2eb9c0f9c11d395075a84e2c40f75"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ab43061a0c81198d88f39aaf90dae9a7744620978f7ef3e3708339b8ed2ef01"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9cb1565a7ad52e096a6988e2ee0397f72fe056dadf75d17fa6b5aebaea05622"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:1b3ea7edd2d24538959c1c1abf97c744d879d4e541d38305f9bd7d9b10c9ec41"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7c7837fe8037e96b6dd5cfcf47263c1620a9d332a87ec06a6ca4564e56bd0f36"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:3b90467ebc3d9fa5b0f9b6489dfb2c304a1db7b9946fa92aa76a831b9d587e99"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:cab9401de3ea52b4b4c6971db5fb5c999bd4260898af972bf23de1c6b5dd9d71"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d1f9282c5f2b5e241034a009779e7b2a1aa045f667ff521e7948ea9b56e0c5ff"}, + {file = "aiohttp-3.8.4-cp36-cp36m-win32.whl", hash = "sha256:5e14f25765a578a0a634d5f0cd1e2c3f53964553a00347998dfdf96b8137f777"}, + {file = "aiohttp-3.8.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4c745b109057e7e5f1848c689ee4fb3a016c8d4d92da52b312f8a509f83aa05e"}, + {file = 
"aiohttp-3.8.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:aede4df4eeb926c8fa70de46c340a1bc2c6079e1c40ccf7b0eae1313ffd33519"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ddaae3f3d32fc2cb4c53fab020b69a05c8ab1f02e0e59665c6f7a0d3a5be54f"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4eb3b82ca349cf6fadcdc7abcc8b3a50ab74a62e9113ab7a8ebc268aad35bb9"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bcb89336efa095ea21b30f9e686763f2be4478f1b0a616969551982c4ee4c3b"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c08e8ed6fa3d477e501ec9db169bfac8140e830aa372d77e4a43084d8dd91ab"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c6cd05ea06daca6ad6a4ca3ba7fe7dc5b5de063ff4daec6170ec0f9979f6c332"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7a00a9ed8d6e725b55ef98b1b35c88013245f35f68b1b12c5cd4100dddac333"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:de04b491d0e5007ee1b63a309956eaed959a49f5bb4e84b26c8f5d49de140fa9"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:40653609b3bf50611356e6b6554e3a331f6879fa7116f3959b20e3528783e699"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dbf3a08a06b3f433013c143ebd72c15cac33d2914b8ea4bea7ac2c23578815d6"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854f422ac44af92bfe172d8e73229c270dc09b96535e8a548f99c84f82dde241"}, + {file = "aiohttp-3.8.4-cp37-cp37m-win32.whl", hash = "sha256:aeb29c84bb53a84b1a81c6c09d24cf33bb8432cc5c39979021cc0f98c1292a1a"}, + {file = "aiohttp-3.8.4-cp37-cp37m-win_amd64.whl", hash = 
"sha256:db3fc6120bce9f446d13b1b834ea5b15341ca9ff3f335e4a951a6ead31105480"}, + {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fabb87dd8850ef0f7fe2b366d44b77d7e6fa2ea87861ab3844da99291e81e60f"}, + {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91f6d540163f90bbaef9387e65f18f73ffd7c79f5225ac3d3f61df7b0d01ad15"}, + {file = "aiohttp-3.8.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d265f09a75a79a788237d7f9054f929ced2e69eb0bb79de3798c468d8a90f945"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d89efa095ca7d442a6d0cbc755f9e08190ba40069b235c9886a8763b03785da"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dac314662f4e2aa5009977b652d9b8db7121b46c38f2073bfeed9f4049732cd"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe11310ae1e4cd560035598c3f29d86cef39a83d244c7466f95c27ae04850f10"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ddb2a2026c3f6a68c3998a6c47ab6795e4127315d2e35a09997da21865757f8"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e75b89ac3bd27d2d043b234aa7b734c38ba1b0e43f07787130a0ecac1e12228a"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6e601588f2b502c93c30cd5a45bfc665faaf37bbe835b7cfd461753068232074"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a5d794d1ae64e7753e405ba58e08fcfa73e3fad93ef9b7e31112ef3c9a0efb52"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a1f4689c9a1462f3df0a1f7e797791cd6b124ddbee2b570d34e7f38ade0e2c71"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3032dcb1c35bc330134a5b8a5d4f68c1a87252dfc6e1262c65a7e30e62298275"}, + {file = 
"aiohttp-3.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8189c56eb0ddbb95bfadb8f60ea1b22fcfa659396ea36f6adcc521213cd7b44d"}, + {file = "aiohttp-3.8.4-cp38-cp38-win32.whl", hash = "sha256:33587f26dcee66efb2fff3c177547bd0449ab7edf1b73a7f5dea1e38609a0c54"}, + {file = "aiohttp-3.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:e595432ac259af2d4630008bf638873d69346372d38255774c0e286951e8b79f"}, + {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5a7bdf9e57126dc345b683c3632e8ba317c31d2a41acd5800c10640387d193ed"}, + {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:22f6eab15b6db242499a16de87939a342f5a950ad0abaf1532038e2ce7d31567"}, + {file = "aiohttp-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7235604476a76ef249bd64cb8274ed24ccf6995c4a8b51a237005ee7a57e8643"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea9eb976ffdd79d0e893869cfe179a8f60f152d42cb64622fca418cd9b18dc2a"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92c0cea74a2a81c4c76b62ea1cac163ecb20fb3ba3a75c909b9fa71b4ad493cf"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493f5bc2f8307286b7799c6d899d388bbaa7dfa6c4caf4f97ef7521b9cb13719"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a63f03189a6fa7c900226e3ef5ba4d3bd047e18f445e69adbd65af433add5a2"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10c8cefcff98fd9168cdd86c4da8b84baaa90bf2da2269c6161984e6737bf23e"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bca5f24726e2919de94f047739d0a4fc01372801a3672708260546aa2601bf57"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:03baa76b730e4e15a45f81dfe29a8d910314143414e528737f8589ec60cf7391"}, + {file = 
"aiohttp-3.8.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8c29c77cc57e40f84acef9bfb904373a4e89a4e8b74e71aa8075c021ec9078c2"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:03543dcf98a6619254b409be2d22b51f21ec66272be4ebda7b04e6412e4b2e14"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:17b79c2963db82086229012cff93ea55196ed31f6493bb1ccd2c62f1724324e4"}, + {file = "aiohttp-3.8.4-cp39-cp39-win32.whl", hash = "sha256:34ce9f93a4a68d1272d26030655dd1b58ff727b3ed2a33d80ec433561b03d67a"}, + {file = "aiohttp-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:41a86a69bb63bb2fc3dc9ad5ea9f10f1c9c8e282b471931be0268ddd09430b04"}, + {file = "aiohttp-3.8.4.tar.gz", hash = "sha256:bf2e1a9162c1e441bf805a1fd166e249d574ca04e03b34f97e2928769e91ab5c"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = ">=4.0.0a3,<5.0" +attrs = ">=17.3.0" +charset-normalizer = ">=2.0,<4.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "cchardet"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "appnope" +version = "0.1.3" +description = "Disable App Nap on macOS >= 10.9" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, + {file = "appnope-0.1.3.tar.gz", hash = 
"sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, +] + +[[package]] +name = "asttokens" +version = "2.2.1" +description = "Annotate AST trees with source code positions" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, + {file = "asttokens-2.2.1.tar.gz", hash = "sha256:4622110b2a6f30b77e1473affaa97e711bc2f07d3f10848420ff1898edbe94f3"}, +] + +[package.dependencies] +six = "*" + +[package.extras] +test = ["astroid", "pytest"] + +[[package]] +name = "async-timeout" +version = "4.0.2" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] + +[[package]] +name = "attrs" +version = "22.2.0" +description = "Classes Without Boilerplate" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", 
"pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] + +[[package]] +name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] + +[[package]] +name = "black" +version = "23.1.0" +description = "The uncompromising code formatter." +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"}, + {file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"}, + {file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"}, + {file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"}, + {file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"}, + {file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"}, + {file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"}, + {file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"}, + {file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"}, + {file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"}, + {file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"}, + {file = 
"black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"}, + {file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"}, + {file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "certifi" +version = "2022.12.7" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, + {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, +] + +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." 
+category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = 
"cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "cfgv" 
+version = "3.3.1" +description = "Validate configuration and produce human readable error messages." +category = "dev" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, + {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.1.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = 
"charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = 
"charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, +] + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "comm" +version = "0.1.2" +description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "comm-0.1.2-py3-none-any.whl", hash = "sha256:9f3abf3515112fa7c55a42a6a5ab358735c9dccc8b5910a9d8e3ef5998130666"}, + {file = "comm-0.1.2.tar.gz", hash = "sha256:3e2f5826578e683999b93716285b3b1f344f157bf75fa9ce0a797564e742f062"}, +] + +[package.dependencies] +traitlets = ">=5.3" + +[package.extras] +test = ["pytest"] + +[[package]] +name = "debugpy" +version = "1.6.6" +description = "An implementation of the Debug Adapter Protocol for Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "debugpy-1.6.6-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:0ea1011e94416e90fb3598cc3ef5e08b0a4dd6ce6b9b33ccd436c1dffc8cd664"}, + {file = "debugpy-1.6.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dff595686178b0e75580c24d316aa45a8f4d56e2418063865c114eef651a982e"}, + {file = "debugpy-1.6.6-cp310-cp310-win32.whl", hash = "sha256:87755e173fcf2ec45f584bb9d61aa7686bb665d861b81faa366d59808bbd3494"}, + {file = "debugpy-1.6.6-cp310-cp310-win_amd64.whl", hash = "sha256:72687b62a54d9d9e3fb85e7a37ea67f0e803aaa31be700e61d2f3742a5683917"}, + {file = "debugpy-1.6.6-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:78739f77c58048ec006e2b3eb2e0cd5a06d5f48c915e2fc7911a337354508110"}, + {file = "debugpy-1.6.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:23c29e40e39ad7d869d408ded414f6d46d82f8a93b5857ac3ac1e915893139ca"}, + {file = "debugpy-1.6.6-cp37-cp37m-win32.whl", hash = "sha256:7aa7e103610e5867d19a7d069e02e72eb2b3045b124d051cfd1538f1d8832d1b"}, + {file = "debugpy-1.6.6-cp37-cp37m-win_amd64.whl", hash = "sha256:f6383c29e796203a0bba74a250615ad262c4279d398e89d895a69d3069498305"}, + {file = "debugpy-1.6.6-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:23363e6d2a04d726bbc1400bd4e9898d54419b36b2cdf7020e3e215e1dcd0f8e"}, + {file = "debugpy-1.6.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b5d1b13d7c7bf5d7cf700e33c0b8ddb7baf030fcf502f76fc061ddd9405d16c"}, + {file = "debugpy-1.6.6-cp38-cp38-win32.whl", hash = "sha256:70ab53918fd907a3ade01909b3ed783287ede362c80c75f41e79596d5ccacd32"}, + {file = "debugpy-1.6.6-cp38-cp38-win_amd64.whl", hash = "sha256:c05349890804d846eca32ce0623ab66c06f8800db881af7a876dc073ac1c2225"}, + {file = "debugpy-1.6.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a771739902b1ae22a120dbbb6bd91b2cae6696c0e318b5007c5348519a4211c6"}, + {file = "debugpy-1.6.6-cp39-cp39-win32.whl", hash = "sha256:549ae0cb2d34fc09d1675f9b01942499751d174381b6082279cf19cdb3c47cbe"}, + {file = "debugpy-1.6.6-cp39-cp39-win_amd64.whl", hash = "sha256:de4a045fbf388e120bb6ec66501458d3134f4729faed26ff95de52a754abddb1"}, + {file = "debugpy-1.6.6-py2.py3-none-any.whl", hash = "sha256:be596b44448aac14eb3614248c91586e2bc1728e020e82ef3197189aae556115"}, + {file = "debugpy-1.6.6.zip", hash = "sha256:b9c2130e1c632540fbf9c2c88341493797ddf58016e7cba02e311de9b0a96b67"}, +] + +[[package]] +name = "decorator" +version = "5.1.1" +description = "Decorators for Humans" +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = 
"sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "distlib" +version = "0.3.6" +description = "Distribution utilities" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, + {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.1.1" +description = "Backport of PEP 654 (exception groups)" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, + {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "executing" +version = "1.2.0" +description = "Get the currently executing AST node of a frame, and other information" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, + {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, +] + +[package.extras] +tests = ["asttokens", "littleutils", "pytest", "rich"] + +[[package]] +name = "filelock" +version = "3.10.0" +description = "A platform independent file lock." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "filelock-3.10.0-py3-none-any.whl", hash = "sha256:e90b34656470756edf8b19656785c5fea73afa1953f3e1b0d645cef11cab3182"}, + {file = "filelock-3.10.0.tar.gz", hash = "sha256:3199fd0d3faea8b911be52b663dfccceb84c95949dd13179aa21436d1a79c4ce"}, +] + +[package.extras] +docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.1)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "frozenlist" +version = "1.3.3" +description = "A list-like structure which implements collections.abc.MutableSequence" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, + {file = 
"frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, + {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, + {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, + {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, + {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, + {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, + 
{file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, + {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", 
hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, + {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, + {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, + {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, + {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, +] + +[[package]] +name = "identify" +version = "2.5.21" +description = "File identification library for Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "identify-2.5.21-py2.py3-none-any.whl", hash = "sha256:69edcaffa8e91ae0f77d397af60f148b6b45a8044b2cc6d99cafa5b04793ff00"}, + {file = "identify-2.5.21.tar.gz", hash = "sha256:7671a05ef9cfaf8ff63b15d45a91a1147a03aaccb2976d4e9bd047cbbc508471"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "importlib-metadata" +version = "6.0.0" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = 
"sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"}, + {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "ipykernel" +version = "6.21.3" +description = "IPython Kernel for Jupyter" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipykernel-6.21.3-py3-none-any.whl", hash = "sha256:24ebd9715e317c185e37156ab3a87382410185230dde7aeffce389d6c7d4428a"}, + {file = "ipykernel-6.21.3.tar.gz", hash = "sha256:c8ff581905d70e7299bc1473a2f7c113bec1744fb3746d58e5b4b93bd8ee7001"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "platform_system == \"Darwin\""} +comm = ">=0.1.1" +debugpy = ">=1.6.5" +ipython = ">=7.23.1" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +matplotlib-inline = ">=0.1" +nest-asyncio = "*" +packaging = "*" +psutil = "*" +pyzmq = ">=20" +tornado = ">=6.1" +traitlets = ">=5.4.0" + +[package.extras] +cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", 
"trio"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +pyqt5 = ["pyqt5"] +pyside6 = ["pyside6"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "ipython" +version = "8.11.0" +description = "IPython: Productive Interactive Computing" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "ipython-8.11.0-py3-none-any.whl", hash = "sha256:5b54478e459155a326bf5f42ee4f29df76258c0279c36f21d71ddb560f88b156"}, + {file = "ipython-8.11.0.tar.gz", hash = "sha256:735cede4099dbc903ee540307b9171fbfef4aa75cfcacc5a273b2cda2f02be04"}, +] + +[package.dependencies] +appnope = {version = "*", markers = "sys_platform == \"darwin\""} +backcall = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} +pickleshare = "*" +prompt-toolkit = ">=3.0.30,<3.0.37 || >3.0.37,<3.1.0" +pygments = ">=2.4.0" +stack-data = "*" +traitlets = ">=5" + +[package.extras] +all = ["black", "curio", "docrepr", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.21)", "pandas", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "trio", "typing-extensions"] +black = ["black"] +doc = ["docrepr", "ipykernel", "matplotlib", "pytest (<7)", "pytest (<7.1)", "pytest-asyncio", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "stack-data", "testpath", "typing-extensions"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["ipywidgets", "notebook"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["pytest (<7.1)", 
"pytest-asyncio", "testpath"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.21)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] + +[[package]] +name = "jedi" +version = "0.18.2" +description = "An autocompletion tool for Python that can be used for text editors." +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, + {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, +] + +[package.dependencies] +parso = ">=0.8.0,<0.9.0" + +[package.extras] +docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] + +[[package]] +name = "jupyter-client" +version = "8.0.3" +description = "Jupyter protocol implementation and client libraries" +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_client-8.0.3-py3-none-any.whl", hash = "sha256:be48ac6bd659cbbddb7a674cf06b3b8afbf53f228253cf58bde604c03bd487b0"}, + {file = "jupyter_client-8.0.3.tar.gz", hash = "sha256:ed65498bea6d876ef9d8da3e0db3dd33c5d129f5b2645f56ae03993782966bd0"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=4.8.3", markers = 
"python_version < \"3.10\""} +jupyter-core = ">=4.12,<5.0.0 || >=5.1.0" +python-dateutil = ">=2.8.2" +pyzmq = ">=23.0" +tornado = ">=6.2" +traitlets = ">=5.3" + +[package.extras] +docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"] +test = ["codecov", "coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"] + +[[package]] +name = "jupyter-core" +version = "5.3.0" +description = "Jupyter core package. A base package on which Jupyter projects rely." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jupyter_core-5.3.0-py3-none-any.whl", hash = "sha256:d4201af84559bc8c70cead287e1ab94aeef3c512848dde077b7684b54d67730d"}, + {file = "jupyter_core-5.3.0.tar.gz", hash = "sha256:6db75be0c83edbf1b7c9f91ec266a9a24ef945da630f3120e1a0046dc13713fc"}, +] + +[package.dependencies] +platformdirs = ">=2.5" +pywin32 = {version = ">=300", markers = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\""} +traitlets = ">=5.3" + +[package.extras] +docs = ["myst-parser", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] +test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] + +[[package]] +name = "matplotlib-inline" +version = "0.1.6" +description = "Inline Matplotlib backend for Jupyter" +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "matplotlib-inline-0.1.6.tar.gz", hash = "sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"}, + {file = "matplotlib_inline-0.1.6-py3-none-any.whl", hash = "sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311"}, +] + +[package.dependencies] +traitlets = "*" + +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +category = "main" 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = 
"multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = 
"multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = 
"multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = 
"multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nest-asyncio" +version = "1.5.6" +description = "Patch asyncio to allow nested event loops" +category = "dev" +optional = false +python-versions = ">=3.5" +files = [ + {file = "nest_asyncio-1.5.6-py3-none-any.whl", hash = "sha256:b9a953fb40dceaa587d109609098db21900182b16440652454a146cffb06e8b8"}, + {file = "nest_asyncio-1.5.6.tar.gz", hash = "sha256:d267cc1ff794403f7df692964d1d2a3fa9418ffea2a3f6859a439ff482fef290"}, +] + +[[package]] +name = "nodeenv" +version = "1.7.0" +description = "Node.js virtual environment builder" +category = "dev" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, + {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "numpy" +version = "1.24.2" +description = "Fundamental package for array computing in Python" +category = "main" +optional = false +python-versions = ">=3.8" +files = [ + {file = "numpy-1.24.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eef70b4fc1e872ebddc38cddacc87c19a3709c0e3e5d20bf3954c147b1dd941d"}, + {file = "numpy-1.24.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8d2859428712785e8a8b7d2b3ef0a1d1565892367b32f915c4a4df44d0e64f5"}, + {file = "numpy-1.24.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6524630f71631be2dabe0c541e7675db82651eb998496bbe16bc4f77f0772253"}, + {file = 
"numpy-1.24.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a51725a815a6188c662fb66fb32077709a9ca38053f0274640293a14fdd22978"}, + {file = "numpy-1.24.2-cp310-cp310-win32.whl", hash = "sha256:2620e8592136e073bd12ee4536149380695fbe9ebeae845b81237f986479ffc9"}, + {file = "numpy-1.24.2-cp310-cp310-win_amd64.whl", hash = "sha256:97cf27e51fa078078c649a51d7ade3c92d9e709ba2bfb97493007103c741f1d0"}, + {file = "numpy-1.24.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7de8fdde0003f4294655aa5d5f0a89c26b9f22c0a58790c38fae1ed392d44a5a"}, + {file = "numpy-1.24.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4173bde9fa2a005c2c6e2ea8ac1618e2ed2c1c6ec8a7657237854d42094123a0"}, + {file = "numpy-1.24.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cecaed30dc14123020f77b03601559fff3e6cd0c048f8b5289f4eeabb0eb281"}, + {file = "numpy-1.24.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a23f8440561a633204a67fb44617ce2a299beecf3295f0d13c495518908e910"}, + {file = "numpy-1.24.2-cp311-cp311-win32.whl", hash = "sha256:e428c4fbfa085f947b536706a2fc349245d7baa8334f0c5723c56a10595f9b95"}, + {file = "numpy-1.24.2-cp311-cp311-win_amd64.whl", hash = "sha256:557d42778a6869c2162deb40ad82612645e21d79e11c1dc62c6e82a2220ffb04"}, + {file = "numpy-1.24.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d0a2db9d20117bf523dde15858398e7c0858aadca7c0f088ac0d6edd360e9ad2"}, + {file = "numpy-1.24.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c72a6b2f4af1adfe193f7beb91ddf708ff867a3f977ef2ec53c0ffb8283ab9f5"}, + {file = "numpy-1.24.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29e6bd0ec49a44d7690ecb623a8eac5ab8a923bce0bea6293953992edf3a76a"}, + {file = "numpy-1.24.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2eabd64ddb96a1239791da78fa5f4e1693ae2dadc82a76bc76a14cbb2b966e96"}, + {file = "numpy-1.24.2-cp38-cp38-win32.whl", hash = 
"sha256:e3ab5d32784e843fc0dd3ab6dcafc67ef806e6b6828dc6af2f689be0eb4d781d"}, + {file = "numpy-1.24.2-cp38-cp38-win_amd64.whl", hash = "sha256:76807b4063f0002c8532cfeac47a3068a69561e9c8715efdad3c642eb27c0756"}, + {file = "numpy-1.24.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4199e7cfc307a778f72d293372736223e39ec9ac096ff0a2e64853b866a8e18a"}, + {file = "numpy-1.24.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:adbdce121896fd3a17a77ab0b0b5eedf05a9834a18699db6829a64e1dfccca7f"}, + {file = "numpy-1.24.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:889b2cc88b837d86eda1b17008ebeb679d82875022200c6e8e4ce6cf549b7acb"}, + {file = "numpy-1.24.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f64bb98ac59b3ea3bf74b02f13836eb2e24e48e0ab0145bbda646295769bd780"}, + {file = "numpy-1.24.2-cp39-cp39-win32.whl", hash = "sha256:63e45511ee4d9d976637d11e6c9864eae50e12dc9598f531c035265991910468"}, + {file = "numpy-1.24.2-cp39-cp39-win_amd64.whl", hash = "sha256:a77d3e1163a7770164404607b7ba3967fb49b24782a6ef85d9b5f54126cc39e5"}, + {file = "numpy-1.24.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92011118955724465fb6853def593cf397b4a1367495e0b59a7e69d40c4eb71d"}, + {file = "numpy-1.24.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9006288bcf4895917d02583cf3411f98631275bc67cce355a7f39f8c14338fa"}, + {file = "numpy-1.24.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:150947adbdfeceec4e5926d956a06865c1c690f2fd902efede4ca6fe2e657c3f"}, + {file = "numpy-1.24.2.tar.gz", hash = "sha256:003a9f530e880cb2cd177cba1af7220b9aa42def9c4afc2a2fc3ee6be7eb2b22"}, +] + +[[package]] +name = "openai" +version = "0.27.2" +description = "Python client library for the OpenAI API" +category = "main" +optional = false +python-versions = ">=3.7.1" +files = [ + {file = "openai-0.27.2-py3-none-any.whl", hash = "sha256:6df674cf257e9e0504f1fd191c333d3f6a2442b13218d0eccf06230eb24d320e"}, + {file = 
"openai-0.27.2.tar.gz", hash = "sha256:5869fdfa34b0ec66c39afa22f4a0fb83a135dff81f6505f52834c6ab3113f762"}, +] + +[package.dependencies] +aiohttp = "*" +requests = ">=2.20" +tqdm = "*" + +[package.extras] +datalib = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +dev = ["black (>=21.6b0,<22.0)", "pytest (>=6.0.0,<7.0.0)", "pytest-asyncio", "pytest-mock"] +embeddings = ["matplotlib", "numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "plotly", "scikit-learn (>=1.0.2)", "scipy", "tenacity (>=8.0.1)"] +wandb = ["numpy", "openpyxl (>=3.0.7)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)", "wandb"] + +[[package]] +name = "packaging" +version = "23.0" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, +] + +[[package]] +name = "parso" +version = "0.8.3" +description = "A Python Parser" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "parso-0.8.3-py2.py3-none-any.whl", hash = "sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"}, + {file = "parso-0.8.3.tar.gz", hash = "sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0"}, +] + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + +[[package]] +name = "pathspec" +version = "0.11.1" +description = "Utility library for gitignore style pattern matching of file paths." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, + {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, +] + +[[package]] +name = "pexpect" +version = "4.8.0" +description = "Pexpect allows easy control of interactive console applications." +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] + +[[package]] +name = "platformdirs" +version = "3.1.1" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"}, + {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"}, +] + +[package.extras] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "2.21.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"}, + {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "prompt-toolkit" +version = "3.0.38" +description = "Library for building powerful interactive command lines in Python" +category = "dev" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"}, + {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"}, +] + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "psutil" +version = "5.9.4" +description = "Cross-platform lib for process and system monitoring in Python." 
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, + {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, + {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, + {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, + {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, + {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, + {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, + {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, + {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, + {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, + {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, + {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, + {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = 
"sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, + {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.2" +description = "Safely evaluate AST nodes without side effects" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "pure_eval-0.2.2-py3-none-any.whl", hash = "sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350"}, + {file = "pure_eval-0.2.2.tar.gz", hash = "sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"}, +] + +[package.extras] +tests = ["pytest"] + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + +[[package]] +name = "pygments" +version = "2.14.0" +description = "Pygments is a syntax highlighting package written in Python." 
+category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, +] + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "pytest" +version = "7.2.0" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.2.0-py3-none-any.whl", hash = "sha256:892f933d339f068883b6fd5a459f03d85bfcb355e4981e146d2c7616c21fef71"}, + {file = "pytest-7.2.0.tar.gz", hash = "sha256:c4014eb40e10f11f355ad4e3c2fb2c6c6d1919c73f3b5a433de4708202cade59"}, +] + +[package.dependencies] +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "dev" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pywin32" +version = "305" +description = "Python for Window Extensions" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = 
"pywin32-305-cp310-cp310-win32.whl", hash = "sha256:421f6cd86e84bbb696d54563c48014b12a23ef95a14e0bdba526be756d89f116"}, + {file = "pywin32-305-cp310-cp310-win_amd64.whl", hash = "sha256:73e819c6bed89f44ff1d690498c0a811948f73777e5f97c494c152b850fad478"}, + {file = "pywin32-305-cp310-cp310-win_arm64.whl", hash = "sha256:742eb905ce2187133a29365b428e6c3b9001d79accdc30aa8969afba1d8470f4"}, + {file = "pywin32-305-cp311-cp311-win32.whl", hash = "sha256:19ca459cd2e66c0e2cc9a09d589f71d827f26d47fe4a9d09175f6aa0256b51c2"}, + {file = "pywin32-305-cp311-cp311-win_amd64.whl", hash = "sha256:326f42ab4cfff56e77e3e595aeaf6c216712bbdd91e464d167c6434b28d65990"}, + {file = "pywin32-305-cp311-cp311-win_arm64.whl", hash = "sha256:4ecd404b2c6eceaca52f8b2e3e91b2187850a1ad3f8b746d0796a98b4cea04db"}, + {file = "pywin32-305-cp36-cp36m-win32.whl", hash = "sha256:48d8b1659284f3c17b68587af047d110d8c44837736b8932c034091683e05863"}, + {file = "pywin32-305-cp36-cp36m-win_amd64.whl", hash = "sha256:13362cc5aa93c2beaf489c9c9017c793722aeb56d3e5166dadd5ef82da021fe1"}, + {file = "pywin32-305-cp37-cp37m-win32.whl", hash = "sha256:a55db448124d1c1484df22fa8bbcbc45c64da5e6eae74ab095b9ea62e6d00496"}, + {file = "pywin32-305-cp37-cp37m-win_amd64.whl", hash = "sha256:109f98980bfb27e78f4df8a51a8198e10b0f347257d1e265bb1a32993d0c973d"}, + {file = "pywin32-305-cp38-cp38-win32.whl", hash = "sha256:9dd98384da775afa009bc04863426cb30596fd78c6f8e4e2e5bbf4edf8029504"}, + {file = "pywin32-305-cp38-cp38-win_amd64.whl", hash = "sha256:56d7a9c6e1a6835f521788f53b5af7912090674bb84ef5611663ee1595860fc7"}, + {file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"}, + {file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = "sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"}, +] + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=3.6" 
+files = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = 
"sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", 
hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", 
hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] + +[[package]] +name = "pyzmq" +version = "25.0.1" +description = "Python bindings for 0MQ" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyzmq-25.0.1-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:94f65e13e6df035b0ae90d49adfe7891aa4e7bdeaa65265729fecc04ab3eb0fe"}, + {file = "pyzmq-25.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f0399450d970990705ce47ed65f5efed3e4627dfc80628c3798100e7b72e023b"}, + {file = "pyzmq-25.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f29709b0431668a967d7ff0394b00a865e7b7dde827ee0a47938b705b7c4aec3"}, + {file = "pyzmq-25.0.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4fee9420b34c0ab426f105926a701a3d73f878fe77f07a1b92e0b78d1e2c795c"}, + {file = "pyzmq-25.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57be375c6bc66b0f685cd298e5c1c3d7ee34a254145b8087aed6e25db372b0f3"}, + {file = "pyzmq-25.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a3309b2c5a5be3d48c9ade77b340361764449aa22854ac65935b1e6c0cdabe2c"}, + {file = "pyzmq-25.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7574d24579e83ee8c5d3b14769e7ba895161c43a601e911dd89d449e545e00ad"}, + {file = "pyzmq-25.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:041d617091258133e602919b28fdce4d3e2f8aedcd1e8b34c599653bc288d59e"}, + {file = "pyzmq-25.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7897ba8c3fedc6b3023bad676ceb69dbf90c077ff18ae3133ba43db47417cc72"}, + {file = "pyzmq-25.0.1-cp310-cp310-win32.whl", hash = "sha256:c462f70dadbd4649e572ca7cd1e7cf3305a8c2afc53b84214c0a7c0c3af8a657"}, + {file = "pyzmq-25.0.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:e3a721710992cf0e213bbb7be48fb0f32202e8d01f556c196c870373bb9ad4f4"}, + {file = "pyzmq-25.0.1-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:b0a0fcf56279b9f3acc9b36a83feb7640c51b0db444b6870e4406d002be1d514"}, + {file = "pyzmq-25.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:95aff52fc847ea5755d2370f86e379ba2ed6eb67a0a6f90f0e8e99c553693b81"}, + {file = "pyzmq-25.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b55366e6c11e1ef7403d072b9867b62cf63eebd31dd038ef65bc8d65572854f6"}, + {file = "pyzmq-25.0.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64a2bc72bcad705ee42a8fe877478ddadb7e260e806562833d3d814125e28a44"}, + {file = "pyzmq-25.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca66aa24422d7f324acd5cb7fc7df616eb6f0205e059393fb108702e33e90c7"}, + {file = "pyzmq-25.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:58d5dfec2e2befd09b04c4683b3c984d2203cf6e054d0f9786be3826737ad612"}, + {file = "pyzmq-25.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3549292d65987e422e2c9f105b1485448381f489d8a6b6b040fc8b8f497bd578"}, + {file = "pyzmq-25.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5b1ca8b0df50d1ac88857ffe9ebd1347e0a5bb5f6e1d99940fdd7df0ffdefb49"}, + {file = "pyzmq-25.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1a107e89cdcf799060ba4fa85fd3c942e19df7b24eb2600618b2406cc73c18e"}, + {file = "pyzmq-25.0.1-cp311-cp311-win32.whl", hash = "sha256:0f22ba4e9041549a5a3f5a545169dda52fa0aa7b5ef46b336cbe6679c4c3c134"}, + {file = "pyzmq-25.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:0644c0d5c73e4bfeee8148f638ab16ad783df1c4d6c2f968552a26a43fb002a1"}, + {file = "pyzmq-25.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c5eb4b17d73b1fc208a4faa6b5918983ccc961770aa37741891f61db302dae4e"}, + {file = "pyzmq-25.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:649dd55948144a108041397f07c1299086ce1c85c2e166831db3a33dac1d0c7f"}, + {file = "pyzmq-25.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c99fd8d3efc138d6a7fb1e822133f62bb18ffec66dc6d398dcb2ac2ab8eb2cb0"}, + {file = "pyzmq-25.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:d72d69d4bb37c05a446d10bc40b391cf8fb7572654fb73fa69e7d2a395197e65"}, + {file = "pyzmq-25.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:036dbf8373aed4ccf56d58c561b23601b8f33919ec1093d8c77b37ac1259702d"}, + {file = "pyzmq-25.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:861c37649c75a2ecfc2034a32b9d5ca744e1e0cddcbf65afbd8027cf7d9755be"}, + {file = "pyzmq-25.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:92f04d63aecbb71d41f7db5f988167ef429f96d8197fd46684688cdb513e8a2e"}, + {file = "pyzmq-25.0.1-cp36-cp36m-win32.whl", hash = "sha256:866a4e918f1f4b2f83e9982b817df257910e3e50e456ffa74f141a10adcd11d1"}, + {file = "pyzmq-25.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:ec29c880b82cd38a63810a93b77e13f167e05732049101947772eed9ae805097"}, + {file = "pyzmq-25.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0241a334e39aa74e4ba0ae5f9e29521f1b48b8d56bf707f25f322c04eb423e99"}, + {file = "pyzmq-25.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3b7032f55b1ed2cd8c349a89e467dca2338b7765fab82cb64c3504e49adaf51"}, + {file = "pyzmq-25.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:960f98f562ee6a50ecf283bc62479d00f5ee10e9068a21683b9e961cd87c9261"}, + {file = "pyzmq-25.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:835da498b71570d56e5526de4d5b36fa10dd9b8a82e2c405f963afeb51ff5bdc"}, + {file = "pyzmq-25.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:21de2ef6099fa8d6a3c2dc15aaca58e9f9ffdcc7b82a246590aa9564815699d9"}, + {file = "pyzmq-25.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:1e448a5a294958e915a7e1b664e6fbfcd3814989d381fb068673317f6f3ea3f8"}, + {file = "pyzmq-25.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40d909bdc8a2d64ad260925154712602ee6a0425ae0b08bce78a19adfdc2f05b"}, + {file = "pyzmq-25.0.1-cp37-cp37m-win32.whl", hash = "sha256:6ff37f2b818df25c887fd40bb434569db7ff66b35f5dfff6f40cc476aee92e3f"}, + {file = "pyzmq-25.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f66ee27a0221771bbaa2cce456e8ca890569c3d18b08b955eb6420c12516537c"}, + {file = "pyzmq-25.0.1-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:1003bbae89435eadec03b4fa3bb6516dd1529fb09ae5704284f7400cc77009ba"}, + {file = "pyzmq-25.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dde7a65a8bfa88aa1721add504320f8344272542291ce4e7c77993fa32901567"}, + {file = "pyzmq-25.0.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:20b6155429d3b57e9e7bd11f1680985ef8b5b0868f1a64073fb8c01326c7c80c"}, + {file = "pyzmq-25.0.1-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e37a764cbf91c1ed9a02e4fede79a414284aca2a0b7d92d82a3c7b82d678ec2d"}, + {file = "pyzmq-25.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa56a362066b3a853a64d35693a08046f640961efcc0e7643768916403e72e70"}, + {file = "pyzmq-25.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c4bdf1241886d39d816535d3ef9fc325bbf02470c9fd5f2cb62706eeb834f7f2"}, + {file = "pyzmq-25.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:446acbac24427ef42bff61a807ddcad8d03df78fb976184a4d7d6f4b1e7d8a67"}, + {file = "pyzmq-25.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b39847501d229e5fab155d88a565edfb182cdd3f7046f15a7f2df9c77cdc422d"}, + {file = "pyzmq-25.0.1-cp38-cp38-win32.whl", hash = "sha256:cba6b81b653d789d76e438c2e77b49f610b23e84b3bb43b99100f08a0a5d637b"}, + {file = "pyzmq-25.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:6eca6b90c4fb290efd27582780b5eaf048887a32b2c5fcd6330819192cb07b38"}, + {file = 
"pyzmq-25.0.1-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:58207a6709e53b723105bac6bb3c6795ee134f7e71351f39c09d52ac235c6b0d"}, + {file = "pyzmq-25.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c62084f37682e7ee4064e8310078be4f6f7687bf528ae5761e2ba7216c5b8949"}, + {file = "pyzmq-25.0.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9c44e9f04f8ed99c6f2e9e49f29d400d7557dd9e9e3f64e1e8a595aedc4258a2"}, + {file = "pyzmq-25.0.1-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c635d1c40d341835066931a018e378428dfbe0347ed4bb45a6b57f7d8c34196e"}, + {file = "pyzmq-25.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eef93b5574c9ff36b4be376555efd369bd55b99bcc7be72f23bd38102dd9392b"}, + {file = "pyzmq-25.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44bc81099ab33388f6c061c1b194307d877428cb2b18282d0385584d5c73ed72"}, + {file = "pyzmq-25.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6d988844ed6caa21b0076b64671e83a136d93c57f1ae5a72b915661af55d313b"}, + {file = "pyzmq-25.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9d5eb6e88ae8a8734f239ffe1ed90559a426cf5b859b8ee66e0cd43fc5daf5c9"}, + {file = "pyzmq-25.0.1-cp39-cp39-win32.whl", hash = "sha256:f6b45db9de4c8adbf5fda58e827a32315d282cfb01e54dc74e7c7ccc0988c010"}, + {file = "pyzmq-25.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:47eeb94b78aa442568b85ad28f85bd37a9c3c34d052cbf8ebf8622c45f23a9cd"}, + {file = "pyzmq-25.0.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0ed7475f3adf0c7750d75740b3267947b501a33f4625ceae709fda2e75ec9ed7"}, + {file = "pyzmq-25.0.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6d09c22ed4d0afcc662d17c2429a03fc1fae7fe7e3bc1f413e744bccfeaabdc3"}, + {file = "pyzmq-25.0.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:703ec5f5a8369c09d8f3eb626358bdb590a2b1375bcce8b7da01b3a03f8b8668"}, + {file = 
"pyzmq-25.0.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20aea31cc0d1f6c3fb4685db08b4c771545cf3fed3c4b4c8942c0a4e97042ec8"}, + {file = "pyzmq-25.0.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b1c03b942557bb366fd3dc377a15763d5d688de1328228136c75e50f968333cc"}, + {file = "pyzmq-25.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4e8a5ced9d92837f52ccdae6351c627b5012669727bc3eede2dc0f581eca1d0e"}, + {file = "pyzmq-25.0.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d78f840d88244272fb7252e47522b1179833aff7ec64583bda3d21259c9c2c20"}, + {file = "pyzmq-25.0.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c3f78fa80780e24d294f9421123cb3bd3b68677953c53da85273a22d1c983298"}, + {file = "pyzmq-25.0.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f6de4305e02560111a5d4555758faa85d44a5bff70cccff58dbf30c81a079f0"}, + {file = "pyzmq-25.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:34a1b1a8ce9b20e01aba71b9279d9b1d4e5980a6a4e42092180e16628a444ca1"}, + {file = "pyzmq-25.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:625759a0112af7c3fb560de5724d749729f00b901f7625d1a3f3fb38897544b1"}, + {file = "pyzmq-25.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cff159b21438c24476a49865f3d5700c9cc5833600661bc0e672decec2ff357"}, + {file = "pyzmq-25.0.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4cc47652d990de9ef967c494c526d73920ef064fef0444355a7cebec6fc50542"}, + {file = "pyzmq-25.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44db5162a6881f7d740dec65917f38f9bfbc5ad9a10e06d7d5deebb27eb63939"}, + {file = "pyzmq-25.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f38bf2c60a3f7b87cf5177043eb7a331a4f53bc9305a2452decbd42ad0c98741"}, + {file = "pyzmq-25.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:b1cf4becd15669bc62a41c1b1bb742e22ac25965134e4254cde82a4dc2554b1b"}, + {file = "pyzmq-25.0.1.tar.gz", hash = "sha256:44a24f7ce44e70d20e2a4c9ba5af70b4611df7a4b920eed2c8e0bdd5a5af225f"}, +] + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} + +[[package]] +name = "requests" +version = "2.28.2" +description = "Python HTTP for Humans." +category = "main" +optional = false +python-versions = ">=3.7, <4" +files = [ + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "setuptools" +version = "67.6.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"}, + {file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", 
"pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "stack-data" +version = "0.6.2" +description = "Extract data from python stack frames and tracebacks for informative displays" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "stack_data-0.6.2-py3-none-any.whl", hash = "sha256:cbb2a53eb64e5785878201a97ed7c7b94883f48b87bfb0bbe8b623c74679e4a8"}, + {file = "stack_data-0.6.2.tar.gz", hash = "sha256:32d2dd0376772d01b6cb9fc996f3c8b57a357089dec328ed4b6553d037eaf815"}, +] + +[package.dependencies] +asttokens = ">=2.1.0" +executing = ">=1.2.0" +pure-eval = "*" + +[package.extras] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "tornado" +version = "6.2" +description = "Tornado is 
a Python web framework and asynchronous networking library, originally developed at FriendFeed." +category = "dev" +optional = false +python-versions = ">= 3.7" +files = [ + {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, + {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8150f721c101abdef99073bf66d3903e292d851bee51910839831caba341a75"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a2f5999215a3a06a4fc218026cd84c61b8b2b40ac5296a6db1f1451ef04c1e"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5f8c52d219d4995388119af7ccaa0bcec289535747620116a58d830e7c25d8a8"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_i686.whl", hash = "sha256:6fdfabffd8dfcb6cf887428849d30cf19a3ea34c2c248461e1f7d718ad30b66b"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:1d54d13ab8414ed44de07efecb97d4ef7c39f7438cf5e976ccd356bebb1b5fca"}, + {file = "tornado-6.2-cp37-abi3-win32.whl", hash = "sha256:5c87076709343557ef8032934ce5f637dbb552efa7b21d08e89ae7619ed0eb23"}, + {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = "sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, + {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, +] + +[[package]] +name = "tqdm" +version = "4.65.0" +description = "Fast, Extensible Progress Meter" +category = "main" +optional = false +python-versions = ">=3.7" 
+files = [ + {file = "tqdm-4.65.0-py3-none-any.whl", hash = "sha256:c4f53a17fe37e132815abceec022631be8ffe1b9381c2e6e30aa70edc99e9671"}, + {file = "tqdm-4.65.0.tar.gz", hash = "sha256:1871fb68a86b8fb3b59ca4cdd3dcccbc7e6d613eeed31f4c332531977b89beb5"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["py-make (>=0.1.0)", "twine", "wheel"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "traitlets" +version = "5.9.0" +description = "Traitlets Python configuration system" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "traitlets-5.9.0-py3-none-any.whl", hash = "sha256:9e6ec080259b9a5940c797d58b613b5e31441c2257b87c2e795c5228ae80d2d8"}, + {file = "traitlets-5.9.0.tar.gz", hash = "sha256:f6cde21a9c68cf756af02035f72d5a723bf607e862e7be33ece505abf4a3bad9"}, +] + +[package.extras] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] +test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] + +[[package]] +name = "typing-extensions" +version = "4.5.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, + {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, +] + +[[package]] +name = "urllib3" +version = "1.26.15" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, + {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "virtualenv" +version = "20.21.0" +description = "Virtual Python Environment builder" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.21.0-py3-none-any.whl", hash = "sha256:31712f8f2a17bd06234fa97fdf19609e789dd4e3e4bf108c3da71d710651adbc"}, + {file = "virtualenv-20.21.0.tar.gz", hash = "sha256:f50e3e60f990a0757c9b68333c9fdaa72d7188caa417f96af9e52407831a3b68"}, +] + +[package.dependencies] +distlib = ">=0.3.6,<1" +filelock = ">=3.4.1,<4" +platformdirs = ">=2.4,<4" + +[package.extras] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "wcwidth" +version = "0.2.6" +description = "Measures the displayed width of unicode strings in a terminal" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.6-py2.py3-none-any.whl", hash = "sha256:795b138f6875577cd91bba52baf9e445cd5118fd32723b460e30a0af30ea230e"}, + {file = 
"wcwidth-0.2.6.tar.gz", hash = "sha256:a5220780a404dbe3353789870978e472cfe477761f06ee55077256e509b156d0"}, +] + +[[package]] +name = "yarl" +version = "1.8.2" +description = "Yet another URL library" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5"}, + {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863"}, + {file = "yarl-1.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826"}, 
+ {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80"}, + {file = "yarl-1.8.2-cp310-cp310-win32.whl", hash = "sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42"}, + {file = "yarl-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a"}, + {file = 
"yarl-1.8.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2"}, + {file = "yarl-1.8.2-cp311-cp311-win32.whl", hash = "sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b"}, + {file = "yarl-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c"}, + {file = "yarl-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0"}, + {file = 
"yarl-1.8.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37"}, + {file = "yarl-1.8.2-cp37-cp37m-win32.whl", hash = "sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89"}, + {file = "yarl-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3"}, + {file = 
"yarl-1.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946"}, + {file = "yarl-1.8.2-cp38-cp38-win32.whl", hash = "sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165"}, + {file = "yarl-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588"}, + {file = "yarl-1.8.2-cp39-cp39-win32.whl", hash = "sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83"}, + {file = "yarl-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778"}, + {file = "yarl-1.8.2.tar.gz", hash = "sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[[package]] +name = "zipp" +version = "3.15.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = 
"sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.8" +content-hash = "9a402301e33bc7b47dc3fe8de479100af889e251761063431b5d6a10fa6ff182" diff --git a/python/pyproject.toml b/python/pyproject.toml new file mode 100644 index 000000000000..c3b638553594 --- /dev/null +++ b/python/pyproject.toml @@ -0,0 +1,25 @@ +[tool.poetry] +name = "semantic-kernel" +version = "0.1.0.dev" +description = "" +authors = ["Microsoft "] +readme = "README.md" +packages = [{include = "semantic_kernel"}] + +[tool.poetry.dependencies] +python = "^3.8" +numpy = "^1.24.2" +openai = "^0.27.0" + +[tool.poetry.group.dev.dependencies] +pre-commit = "^2.21.0" +black = {version = "^23.1.0", allow-prereleases = true} +ipykernel = "^6.21.1" +pytest = "7.2.0" + +[tool.isort] +profile = "black" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/python/requirements.txt b/python/requirements.txt new file mode 100644 index 000000000000..4f36ee68bd4d --- /dev/null +++ b/python/requirements.txt @@ -0,0 +1,2 @@ +openai==0.27.* +numpy==1.24.* diff --git a/python/semantic_kernel/__init__.py b/python/semantic_kernel/__init__.py new file mode 100644 index 000000000000..f20ab71438b6 --- /dev/null +++ b/python/semantic_kernel/__init__.py @@ -0,0 +1,42 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import semantic_kernel.memory as memory +from semantic_kernel.configuration.kernel_config import KernelConfig +from semantic_kernel.kernel_base import KernelBase +from semantic_kernel.kernel_builder import KernelBuilder +from semantic_kernel.kernel_extensions import KernelExtensions as extensions +from semantic_kernel.memory.null_memory import NullMemory +from semantic_kernel.orchestration.context_variables import ContextVariables +from semantic_kernel.orchestration.sk_context import SKContext +from semantic_kernel.orchestration.sk_function_base import SKFunctionBase +from semantic_kernel.semantic_functions.prompt_template import PromptTemplate +from semantic_kernel.semantic_functions.prompt_template_config import ( + PromptTemplateConfig, +) +from semantic_kernel.semantic_functions.semantic_function_config import ( + SemanticFunctionConfig, +) +from semantic_kernel.utils.null_logger import NullLogger +from semantic_kernel.utils.settings import openai_settings_from_dot_env + + +def create_kernel() -> KernelBase: + return KernelBuilder.create_kernel() + + +def kernel_builder() -> KernelBuilder: + return KernelBuilder(KernelConfig(), NullMemory(), NullLogger()) + + +__all__ = [ + "create_kernel", + "openai_settings_from_dot_env", + "extensions", + "PromptTemplateConfig", + "PromptTemplate", + "SemanticFunctionConfig", + "ContextVariables", + "SKFunctionBase", + "SKContext", + "memory", +] diff --git a/python/semantic_kernel/ai/ai_exception.py b/python/semantic_kernel/ai/ai_exception.py new file mode 100644 index 000000000000..0e1dcd9f1ce4 --- /dev/null +++ b/python/semantic_kernel/ai/ai_exception.py @@ -0,0 +1,60 @@ +# Copyright (c) Microsoft. All rights reserved. + +from enum import Enum +from typing import Optional + +from semantic_kernel.diagnostics.sk_exception import SKException + + +class AIException(SKException): + class ErrorCodes(Enum): + # Unknown error. + UnknownError = -1 + # No response. + NoResponse = 0 + # Access is denied. 
+ AccessDenied = 1 + # The request was invalid. + InvalidRequest = 2 + # The content of the response was invalid. + InvalidResponseContent = 3 + # The request was throttled. + Throttling = 4 + # The request timed out. + RequestTimeout = 5 + # There was an error in the service. + ServiceError = 6 + # The requested model is not available. + ModelNotAvailable = 7 + # The supplied configuration was invalid. + InvalidConfiguration = 8 + # The function is not supported. + FunctionTypeNotSupported = 9 + + # The error code. + _error_code: ErrorCodes + + def __init__( + self, + error_code: ErrorCodes, + message: str, + inner_exception: Optional[Exception] = None, + ) -> None: + """Initializes a new instance of the AIException class. + + Arguments: + error_code {ErrorCodes} -- The error code. + message {str} -- The error message. + inner_exception {Exception} -- The inner exception. + """ + super().__init__(error_code, message, inner_exception) + self._error_code = error_code + + @property + def error_code(self) -> ErrorCodes: + """Gets the error code. + + Returns: + ErrorCodes -- The error code. + """ + return self._error_code diff --git a/python/semantic_kernel/ai/complete_request_settings.py b/python/semantic_kernel/ai/complete_request_settings.py new file mode 100644 index 000000000000..5c217ee6bb7e --- /dev/null +++ b/python/semantic_kernel/ai/complete_request_settings.py @@ -0,0 +1,39 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from dataclasses import dataclass, field +from typing import TYPE_CHECKING, List + +if TYPE_CHECKING: + from semantic_kernel.semantic_functions.prompt_template_config import ( + PromptTemplateConfig, + ) + + +@dataclass +class CompleteRequestSettings: + temperature: float = 0.0 + top_p: float = 1.0 + presence_penalty: float = 0.0 + frequency_penalty: float = 0.0 + max_tokens: int = 256 + stop_sequences: List[str] = field(default_factory=list) + number_of_responses: int = 1 + logprobs: int = 0 + + def update_from_completion_config( + self, completion_config: "PromptTemplateConfig.CompletionConfig" + ): + self.temperature = completion_config.temperature + self.top_p = completion_config.top_p + self.presence_penalty = completion_config.presence_penalty + self.frequency_penalty = completion_config.frequency_penalty + self.max_tokens = completion_config.max_tokens + self.stop_sequences = completion_config.stop_sequences + + @staticmethod + def from_completion_config( + completion_config: "PromptTemplateConfig.CompletionConfig", + ) -> "CompleteRequestSettings": + settings = CompleteRequestSettings() + settings.update_from_completion_config(completion_config) + return settings diff --git a/python/semantic_kernel/ai/embeddings/embedding_generator_base.py b/python/semantic_kernel/ai/embeddings/embedding_generator_base.py new file mode 100644 index 000000000000..ef84709bdeeb --- /dev/null +++ b/python/semantic_kernel/ai/embeddings/embedding_generator_base.py @@ -0,0 +1,12 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from abc import ABC, abstractmethod +from typing import List + +from numpy import ndarray + + +class EmbeddingGeneratorBase(ABC): + @abstractmethod + async def generate_embeddings_async(self, texts: List[str]) -> ndarray: + pass diff --git a/python/semantic_kernel/ai/embeddings/embedding_index_base.py b/python/semantic_kernel/ai/embeddings/embedding_index_base.py new file mode 100644 index 000000000000..c5872f0a1baa --- /dev/null +++ b/python/semantic_kernel/ai/embeddings/embedding_index_base.py @@ -0,0 +1,20 @@ +# Copyright (c) Microsoft. All rights reserved. + +from abc import ABC, abstractmethod +from typing import List, Tuple + +from numpy import ndarray + +from semantic_kernel.memory.memory_record import MemoryRecord + + +class EmbeddingIndexBase(ABC): + @abstractmethod + async def get_nearest_matches_async( + self, + collection: str, + embedding: ndarray, + limit: int, + min_relevance_score: float, + ) -> List[Tuple[MemoryRecord, float]]: + pass diff --git a/python/semantic_kernel/ai/open_ai/services/azure_open_ai_config.py b/python/semantic_kernel/ai/open_ai/services/azure_open_ai_config.py new file mode 100644 index 000000000000..b7ece681f330 --- /dev/null +++ b/python/semantic_kernel/ai/open_ai/services/azure_open_ai_config.py @@ -0,0 +1,54 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.diagnostics.verify import Verify + + +# TODO: support for AAD auth. +class AzureOpenAIConfig: + """ + The Azure OpenAI configuration. 
+ """ + + # Azure OpenAI deployment name, + # https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + deployment_name: str + # Azure OpenAI deployment URL, + # https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + endpoint: str + # Azure OpenAI API key, + # https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + api_key: str + # Azure OpenAI API version, + # https://learn.microsoft.com/azure/cognitive-services/openai/reference + api_version: str + + def __init__( + self, + deployment_name: str, + endpoint: str, + api_key: str, + api_version: str, + ) -> None: + """Initializes a new instance of the AzureOpenAIConfig class. + + Arguments: + deployment_name {str} -- Azure OpenAI deployment name, + https://learn.microsoft.com/azure/cognitive-services/openai/how-to/create-resource + endpoint {str} -- Azure OpenAI deployment URL, + https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + api_key {str} -- Azure OpenAI API key, + https://learn.microsoft.com/azure/cognitive-services/openai/quickstart + api_version {str} -- Azure OpenAI API version, + https://learn.microsoft.com/azure/cognitive-services/openai/reference + """ + Verify.not_empty(deployment_name, "The deployment name is empty") + Verify.not_empty(endpoint, "The endpoint is empty") + Verify.starts_with( + endpoint, "https://", "The endpoint URL must start with https://" + ) + Verify.not_empty(api_key, "The API key is empty") + + self.deployment_name = deployment_name + self.endpoint = endpoint + self.api_key = api_key + self.api_version = api_version diff --git a/python/semantic_kernel/ai/open_ai/services/azure_text_completion.py b/python/semantic_kernel/ai/open_ai/services/azure_text_completion.py new file mode 100644 index 000000000000..673cd0a36c8f --- /dev/null +++ b/python/semantic_kernel/ai/open_ai/services/azure_text_completion.py @@ -0,0 +1,5 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ + +class AzureTextCompletion: + pass diff --git a/python/semantic_kernel/ai/open_ai/services/open_ai_config.py b/python/semantic_kernel/ai/open_ai/services/open_ai_config.py new file mode 100644 index 000000000000..6e1dfe20c8e3 --- /dev/null +++ b/python/semantic_kernel/ai/open_ai/services/open_ai_config.py @@ -0,0 +1,41 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Optional + +from semantic_kernel.diagnostics.verify import Verify + + +# TODO: allow for overriding endpoints +class OpenAIConfig: + """ + The OpenAI configuration. + """ + + # OpenAI model name, see https://platform.openai.com/docs/models + model_id: str + # OpenAI API key, see https://platform.openai.com/account/api-keys + api_key: str + # OpenAI organization ID. This is usually optional unless your + # account belongs to multiple organizations. + org_id: Optional[str] + + def __init__( + self, model_id: str, api_key: str, org_id: Optional[str] = None + ) -> None: + """Initializes a new instance of the OpenAIConfig class. + + Arguments: + model_id {str} -- OpenAI model name, see + https://platform.openai.com/docs/models + api_key {str} -- OpenAI API key, see + https://platform.openai.com/account/api-keys + org_id {Optional[str]} -- OpenAI organization ID. + This is usually optional unless your + account belongs to multiple organizations. + """ + Verify.not_empty(model_id, "The model ID is empty") + Verify.not_empty(api_key, "The API key is empty") + + self.model_id = model_id + self.api_key = api_key + self.org_id = org_id diff --git a/python/semantic_kernel/ai/open_ai/services/open_ai_text_completion.py b/python/semantic_kernel/ai/open_ai/services/open_ai_text_completion.py new file mode 100644 index 000000000000..a1f6e4b02bee --- /dev/null +++ b/python/semantic_kernel/ai/open_ai/services/open_ai_text_completion.py @@ -0,0 +1,113 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from logging import Logger +from typing import Any, Optional + +from semantic_kernel.ai.ai_exception import AIException +from semantic_kernel.ai.complete_request_settings import CompleteRequestSettings +from semantic_kernel.ai.text_completion_client_base import TextCompletionClientBase +from semantic_kernel.diagnostics.verify import Verify +from semantic_kernel.utils.null_logger import NullLogger + + +class OpenAITextCompletion(TextCompletionClientBase): + _model_id: str + _api_key: str + _org_id: Optional[str] = None + _log: Logger + + def __init__( + self, + model_id: str, + api_key: str, + org_id: Optional[str] = None, + log: Optional[Logger] = None, + ) -> None: + """ + Initializes a new instance of the OpenAITextCompletion class. + + Arguments: + model_id {str} -- OpenAI model name, see + https://platform.openai.com/docs/models + api_key {str} -- OpenAI API key, see + https://platform.openai.com/account/api-keys + org_id {Optional[str]} -- OpenAI organization ID. + This is usually optional unless your + account belongs to multiple organizations. + """ + self._model_id = model_id + self._api_key = api_key + self._org_id = org_id + self._log = log if log is not None else NullLogger() + + async def complete_simple_async( + self, prompt: str, request_settings: CompleteRequestSettings + ) -> str: + """ + Completes the given prompt. Returns a single string completion. + Cannot return multiple completions. Cannot return logprobs. + + Arguments: + prompt {str} -- The prompt to complete. + request_settings {CompleteRequestSettings} -- The request settings. + + Returns: + str -- The completed text. 
+ """ + import openai + + Verify.not_empty(prompt, "The prompt is empty") + Verify.not_null(request_settings, "The request settings cannot be empty") + + if request_settings.max_tokens < 1: + raise AIException( + AIException.ErrorCodes.InvalidRequest, + "The max tokens must be greater than 0, " + f"but was {request_settings.max_tokens}", + ) + + if request_settings.number_of_responses != 1: + raise AIException( + AIException.ErrorCodes.InvalidRequest, + "complete_simple_async only supports a single completion, " + f"but {request_settings.number_of_responses} were requested", + ) + + if request_settings.logprobs != 0: + raise AIException( + AIException.ErrorCodes.InvalidRequest, + "complete_simple_async does not support logprobs, " + f"but logprobs={request_settings.logprobs} was requested", + ) + + openai.api_key = self._api_key + if self._org_id is not None: + openai.organization = self._org_id + + try: + response: Any = await openai.Completion.acreate( + model=self._model_id, + prompt=prompt, + temperature=request_settings.temperature, + top_p=request_settings.top_p, + presence_penalty=request_settings.presence_penalty, + frequency_penalty=request_settings.frequency_penalty, + max_tokens=request_settings.max_tokens, + stop=( + request_settings.stop_sequences + if len(request_settings.stop_sequences) > 0 + else None + ), + ) + except Exception as ex: + raise AIException( + AIException.ErrorCodes.ServiceError, + "OpenAI service failed to complete the prompt", + ex, + ) + + # TODO: tracking on token counts/etc. + + return response.choices[0].text + + # TODO: complete w/ multiple... diff --git a/python/semantic_kernel/ai/open_ai/services/open_ai_text_embedding.py b/python/semantic_kernel/ai/open_ai/services/open_ai_text_embedding.py new file mode 100644 index 000000000000..ca67602b87ba --- /dev/null +++ b/python/semantic_kernel/ai/open_ai/services/open_ai_text_embedding.py @@ -0,0 +1,66 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from logging import Logger +from typing import Any, List, Optional + +from numpy import array, ndarray + +from semantic_kernel.ai.ai_exception import AIException +from semantic_kernel.ai.embeddings.embedding_generator_base import ( + EmbeddingGeneratorBase, +) +from semantic_kernel.utils.null_logger import NullLogger + + +class OpenAITextEmbedding(EmbeddingGeneratorBase): + _model_id: str + _api_key: str + _org_id: Optional[str] = None + _log: Logger + + def __init__( + self, + model_id: str, + api_key: str, + org_id: Optional[str] = None, + log: Optional[Logger] = None, + ) -> None: + """ + Initializes a new instance of the OpenAITextCompletion class. + + Arguments: + model_id {str} -- OpenAI model name, see + https://platform.openai.com/docs/models + api_key {str} -- OpenAI API key, see + https://platform.openai.com/account/api-keys + org_id {Optional[str]} -- OpenAI organization ID. + This is usually optional unless your + account belongs to multiple organizations. + """ + self._model_id = model_id + self._api_key = api_key + self._org_id = org_id + self._log = log if log is not None else NullLogger() + + async def generate_embeddings_async(self, texts: List[str]) -> ndarray: + import openai + + openai.api_key = self._api_key + if self._org_id is not None: + openai.organization = self._org_id + + try: + response: Any = await openai.Embedding.acreate( + model=self._model_id, + input=texts, + ) + + # make numpy arrays from the response + raw_embeddings = [array(x["embedding"]) for x in response["data"]] + return array(raw_embeddings) + except Exception as ex: + raise AIException( + AIException.ErrorCodes.ServiceError, + "OpenAI service failed to generate embeddings", + ex, + ) diff --git a/python/semantic_kernel/ai/text_completion_client_base.py b/python/semantic_kernel/ai/text_completion_client_base.py new file mode 100644 index 000000000000..10a142ed71f6 --- /dev/null +++ b/python/semantic_kernel/ai/text_completion_client_base.py @@ -0,0 +1,19 @@ +# Copyright 
(c) Microsoft. All rights reserved. + +from abc import ABC, abstractmethod +from logging import Logger +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from semantic_kernel.ai.complete_request_settings import CompleteRequestSettings + + +class TextCompletionClientBase(ABC): + @abstractmethod + async def complete_simple_async( + self, + prompt: str, + settings: "CompleteRequestSettings", + logger: Logger, + ) -> str: + pass diff --git a/python/semantic_kernel/configuration/backend_config.py b/python/semantic_kernel/configuration/backend_config.py new file mode 100644 index 000000000000..30e8fb26812b --- /dev/null +++ b/python/semantic_kernel/configuration/backend_config.py @@ -0,0 +1,35 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Optional + +from semantic_kernel.ai.open_ai.services.azure_open_ai_config import AzureOpenAIConfig +from semantic_kernel.ai.open_ai.services.open_ai_config import OpenAIConfig +from semantic_kernel.configuration.backend_types import BackendType + + +class BackendConfig: + backend_type: BackendType = BackendType.Unknown + azure_open_ai: Optional[AzureOpenAIConfig] = None + open_ai: Optional[OpenAIConfig] = None + + def __init__( + self, + backend_type: BackendType, + azure_open_ai: Optional[AzureOpenAIConfig] = None, + open_ai: Optional[OpenAIConfig] = None, + ) -> None: + """ + Initializes a new instance of the BackendConfig class. + + Arguments: + backend_type {BackendType} -- The backend type. + + Keyword Arguments: + azure_open_ai {AzureOpenAIConfig} -- The Azure OpenAI + configuration. (default: {None}) + open_ai {OpenAIConfig} -- The OpenAI configuration. 
+ (default: {None}) + """ + self.backend_type = backend_type + self.azure_open_ai = azure_open_ai + self.open_ai = open_ai diff --git a/python/semantic_kernel/configuration/backend_types.py b/python/semantic_kernel/configuration/backend_types.py new file mode 100644 index 000000000000..f7b05c34fb87 --- /dev/null +++ b/python/semantic_kernel/configuration/backend_types.py @@ -0,0 +1,9 @@ +# Copyright (c) Microsoft. All rights reserved. + +import enum + + +class BackendType(enum.Enum): + Unknown = -1 + AzureOpenAI = 0 + OpenAI = 1 diff --git a/python/semantic_kernel/configuration/kernel_config.py b/python/semantic_kernel/configuration/kernel_config.py new file mode 100644 index 000000000000..0c04889bafba --- /dev/null +++ b/python/semantic_kernel/configuration/kernel_config.py @@ -0,0 +1,274 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Callable, Dict, List, Optional + +from semantic_kernel.ai.open_ai.services.azure_open_ai_config import AzureOpenAIConfig +from semantic_kernel.ai.open_ai.services.open_ai_config import OpenAIConfig +from semantic_kernel.configuration.backend_config import BackendConfig +from semantic_kernel.configuration.backend_types import BackendType +from semantic_kernel.diagnostics.verify import Verify +from semantic_kernel.kernel_exception import KernelException +from semantic_kernel.reliability.pass_through_without_retry import ( + PassThroughWithoutRetry, +) +from semantic_kernel.reliability.retry_mechanism import RetryMechanism + + +class KernelConfig: + _completion_backends: Dict[str, BackendConfig] = {} + _embeddings_backends: Dict[str, BackendConfig] = {} + _default_completion_backend: Optional[str] = None + _default_embeddings_backend: Optional[str] = None + _retry_mechanism: RetryMechanism = PassThroughWithoutRetry() + + def add_azure_openai_completion_backend( + self, + name: str, + deployment_name: str, + endpoint: str, + api_key: str, + api_version: str = "2022-12-01", + overwrite: bool = False, + ) -> 
"KernelConfig": + Verify.not_empty(name, "The backend name is empty") + + if not overwrite and name in self._completion_backends: + raise KernelException( + KernelException.ErrorCodes.InvalidBackendConfiguration, + f"The completion backend cannot be added twice: {name}", + ) + + self._completion_backends[name] = BackendConfig( + backend_type=BackendType.AzureOpenAI, + azure_open_ai=AzureOpenAIConfig( + deployment_name, endpoint, api_key, api_version + ), + ) + + if self._default_completion_backend is None: + self._default_completion_backend = name + + return self + + def add_openai_completion_backend( + self, + name: str, + model_id: str, + api_key: str, + org_id: Optional[str] = None, + overwrite: bool = False, + ) -> "KernelConfig": + Verify.not_empty(name, "The backend name is empty") + + if not overwrite and name in self._completion_backends: + raise KernelException( + KernelException.ErrorCodes.InvalidBackendConfiguration, + f"The completion backend cannot be added twice: {name}", + ) + + self._completion_backends[name] = BackendConfig( + backend_type=BackendType.OpenAI, + open_ai=OpenAIConfig(model_id, api_key, org_id), + ) + + if self._default_completion_backend is None: + self._default_completion_backend = name + + return self + + def add_azure_open_ai_embeddings_backend( + self, + name: str, + deployment_name: str, + endpoint: str, + api_key: str, + api_version: str = "2022-12-01", + overwrite: bool = False, + ) -> "KernelConfig": + Verify.not_empty(name, "The backend name is empty") + + if not overwrite and name in self._embeddings_backends: + raise KernelException( + KernelException.ErrorCodes.InvalidBackendConfiguration, + f"The embeddings backend cannot be added twice: {name}", + ) + + self._embeddings_backends[name] = BackendConfig( + backend_type=BackendType.AzureOpenAI, + azure_open_ai=AzureOpenAIConfig( + deployment_name, endpoint, api_key, api_version + ), + ) + + if self._default_embeddings_backend is None: + self._default_embeddings_backend = 
name + + return self + + def add_open_ai_embeddings_backend( + self, + name: str, + model_id: str, + api_key: str, + org_id: Optional[str] = None, + overwrite: bool = False, + ) -> "KernelConfig": + Verify.not_empty(name, "The backend name is empty") + + if not overwrite and name in self._embeddings_backends: + raise KernelException( + KernelException.ErrorCodes.InvalidBackendConfiguration, + f"The embeddings backend cannot be added twice: {name}", + ) + + self._embeddings_backends[name] = BackendConfig( + backend_type=BackendType.OpenAI, + open_ai=OpenAIConfig(model_id, api_key, org_id), + ) + + if self._default_embeddings_backend is None: + self._default_embeddings_backend = name + + return self + + def has_completion_backend( + self, name: str, condition: Optional[Callable[[BackendConfig], bool]] = None + ) -> bool: + if condition is None: + return name in self._completion_backends + + return any( + [name == n and condition(v) for n, v in self._completion_backends.items()] + ) + + def has_embeddings_backend( + self, name: str, condition: Optional[Callable[[BackendConfig], bool]] = None + ) -> bool: + if condition is None: + return name in self._embeddings_backends + + return any( + [name == n and condition(v) for n, v in self._embeddings_backends.items()] + ) + + def set_retry_mechanism( + self, retry_mechanism: Optional[RetryMechanism] + ) -> "KernelConfig": + self._retry_mechanism = ( + retry_mechanism + if retry_mechanism is not None + else PassThroughWithoutRetry() + ) + return self + + def set_default_completion_backend(self, name: str) -> "KernelConfig": + if name not in self._completion_backends: + raise KernelException( + KernelException.ErrorCodes.BackendNotFound, + f"The completions backend doesn't exist: {name}", + ) + + self._default_completion_backend = name + return self + + @property + def default_completion_backend(self) -> Optional[str]: + return self._default_completion_backend + + def set_default_embeddings_backend(self, name: str) -> 
"KernelConfig": + if name not in self._embeddings_backends: + raise KernelException( + KernelException.ErrorCodes.BackendNotFound, + f"The embeddings backend doesn't exist: {name}", + ) + + self._default_embeddings_backend = name + return self + + @property + def default_embeddings_backend(self) -> Optional[str]: + return self._default_embeddings_backend + + def get_completion_backend(self, name: Optional[str]) -> BackendConfig: + if name is None or name.strip() == "": + if self._default_completion_backend is None: + raise KernelException( + KernelException.ErrorCodes.BackendNotFound, + f"Completion backend not found: {name}. " + f"No default backend available.", + ) + + return self._completion_backends[self._default_completion_backend] + + if name in self._completion_backends: + return self._completion_backends[name] + + if self._default_completion_backend is not None: + return self._completion_backends[self._default_completion_backend] + + raise KernelException( + KernelException.ErrorCodes.BackendNotFound, + f"Completion backend not found: {name}. " f"No default backend available.", + ) + + def get_embeddings_backend(self, name: Optional[str]) -> BackendConfig: + if name is None or name.strip() == "": + if self._default_embeddings_backend is None: + raise KernelException( + KernelException.ErrorCodes.BackendNotFound, + f"Embeddings backend not found: {name}. " + f"No default backend available.", + ) + + return self._embeddings_backends[self._default_embeddings_backend] + + if name in self._embeddings_backends: + return self._embeddings_backends[name] + + if self._default_embeddings_backend is not None: + return self._embeddings_backends[self._default_embeddings_backend] + + raise KernelException( + KernelException.ErrorCodes.BackendNotFound, + f"Embeddings backend not found: {name}. 
" f"No default backend available.", + ) + + def get_all_completion_backends(self) -> List[BackendConfig]: + return list(self._completion_backends.values()) + + def get_all_embeddings_backends(self) -> List[BackendConfig]: + return list(self._embeddings_backends.values()) + + def remove_completion_backend(self, name: str) -> None: + if name in self._completion_backends: + del self._completion_backends[name] + + if name == self._default_completion_backend: + self._default_completion_backend = ( + list(self._completion_backends.keys())[0] + if len(self._completion_backends) > 0 + else None + ) + + def remove_embeddings_backend(self, name: str) -> None: + if name in self._embeddings_backends: + del self._embeddings_backends[name] + + if name == self._default_embeddings_backend: + self._default_embeddings_backend = ( + list(self._embeddings_backends.keys())[0] + if len(self._embeddings_backends) > 0 + else None + ) + + def remove_all_completion_backends(self) -> None: + self._completion_backends.clear() + self._default_completion_backend = None + + def remove_all_embeddings_backends(self) -> None: + self._embeddings_backends.clear() + self._default_embeddings_backend = None + + def remove_all_backends(self) -> None: + self.remove_all_completion_backends() + self.remove_all_embeddings_backends() diff --git a/python/semantic_kernel/core_skills/__init__.py b/python/semantic_kernel/core_skills/__init__.py new file mode 100644 index 000000000000..0ba3fc5f9be8 --- /dev/null +++ b/python/semantic_kernel/core_skills/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from semantic_kernel.core_skills.text_memory_skill import TextMemorySkill + +__all__ = ["TextMemorySkill"] diff --git a/python/semantic_kernel/core_skills/text_memory_skill.py b/python/semantic_kernel/core_skills/text_memory_skill.py new file mode 100644 index 000000000000..6b6d482a6f45 --- /dev/null +++ b/python/semantic_kernel/core_skills/text_memory_skill.py @@ -0,0 +1,132 @@ +# Copyright (c) Microsoft. All rights reserved. + +from semantic_kernel.diagnostics.verify import Verify +from semantic_kernel.orchestration.sk_context import SKContext +from semantic_kernel.skill_definition import ( + sk_function, + sk_function_context_parameter, + sk_function_input, + sk_function_name, +) + + +class TextMemorySkill: + + COLLECTION_PARAM = "collection" + RELEVANCE_PARAM = "relevance" + KEY_PARAM = "key" + DEFAULT_COLLECTION = "generic" + DEFAULT_RELEVANCE = 0.75 + + # @staticmethod + @sk_function("Recall a fact from the long term memory") + @sk_function_name("recall") + @sk_function_input(description="The information to retrieve") + @sk_function_context_parameter( + name=COLLECTION_PARAM, + description="The collection to search for information", + default_value=DEFAULT_COLLECTION, + ) + @sk_function_context_parameter( + name=RELEVANCE_PARAM, + description="The relevance score, from 0.0 to 1.0; 1.0 means perfect match", + default_value=DEFAULT_RELEVANCE, + ) + async def recall_async(ask: str, context: SKContext) -> str: + """ + Recall a fact from the long term memory. + + Example: + sk_context["input"] = "what is the capital of France?" 
+ {{memory.recall $input}} => "Paris" + + Args: + ask -- The question to ask the memory + context -- Contains the 'collection' to search for information + and the 'relevance' score to use when searching + + Returns: + The nearest item from the memory store + """ + Verify.not_null(context.variables, "Context has no variables") + assert context.variables is not None # for type checker + Verify.not_null(context.memory, "Context has no memory") + assert context.memory is not None # for type checker + + collection = ( + context.variables[TextMemorySkill.COLLECTION_PARAM] + if context.variables.contains_key(TextMemorySkill.COLLECTION_PARAM) + else TextMemorySkill.DEFAULT_COLLECTION + ) + Verify.not_empty( + collection, "Memory collection not defined for TextMemorySkill" + ) + + relevance = ( + context.variables[TextMemorySkill.RELEVANCE_PARAM] + if context.variables.contains_key(TextMemorySkill.RELEVANCE_PARAM) + else TextMemorySkill.DEFAULT_RELEVANCE + ) + if relevance is None or str(relevance).strip() == "": + relevance = TextMemorySkill.DEFAULT_RELEVANCE + + results = await context.memory.search_async( + collection, ask, min_relevance_score=float(relevance) + ) + if results is None or len(results) == 0: + if context.log is not None: + context.log.warning(f"Memory not found in collection: {collection}") + return "" + + return results[0].text if results[0].text is not None else "" + + # @staticmethod + @sk_function("Save information to semantic memory") + @sk_function_name("save") + @sk_function_input(description="The information to save") + @sk_function_context_parameter( + name=COLLECTION_PARAM, + description="The collection to save the information", + default_value=DEFAULT_COLLECTION, + ) + @sk_function_context_parameter( + name=KEY_PARAM, + description="The unique key to associate with the information", + ) + async def save_async(text: str, context: SKContext): + """ + Save a fact to the long term memory. 
+ + Example: + sk_context["input"] = "the capital of France is Paris" + sk_context[TextMemorySkill.KEY_PARAM] = "countryInfo1" + {{memory.save $input}} + + Args: + text -- The text to save to the memory + context -- Contains the 'collection' to save the information + and unique 'key' to associate with the information + """ + Verify.not_null(context.variables, "Context has no variables") + assert context.variables is not None # for type checker + Verify.not_null(context.memory, "Context has no memory") + assert context.memory is not None # for type checker + + collection = ( + context.variables[TextMemorySkill.COLLECTION_PARAM] + if context.variables.contains_key(TextMemorySkill.COLLECTION_PARAM) + else TextMemorySkill.DEFAULT_COLLECTION + ) + Verify.not_empty( + collection, "Memory collection not defined for TextMemorySkill" + ) + + key = ( + context.variables[TextMemorySkill.KEY_PARAM] + if context.variables.contains_key(TextMemorySkill.KEY_PARAM) + else None + ) + Verify.not_empty(key, "Memory key not defined for TextMemorySkill") + assert key is not None # for type checker + + await context.memory.save_information_async(collection, text=text, id=key) diff --git a/python/semantic_kernel/diagnostics/sk_exception.py b/python/semantic_kernel/diagnostics/sk_exception.py new file mode 100644 index 000000000000..b5743bb3229f --- /dev/null +++ b/python/semantic_kernel/diagnostics/sk_exception.py @@ -0,0 +1,29 @@ +# Copyright (c) Microsoft. All rights reserved. + +from enum import Enum +from typing import Optional + + +class SKException(Exception): + """The base class for all semantic kernel exceptions.""" + + def __init__( + self, + error_code: Enum, + message: Optional[str] = None, + inner_exception: Optional[Exception] = None, + ) -> None: + """Initializes a new instance of the SKException class. + + Arguments: + error_code {Enum} -- The error code. + message {str} -- The error message. + inner_exception {Exception} -- The inner exception. 
+ """ + super().__init__(self._build_message(error_code, message), inner_exception) + + def _build_message(self, error_code: Enum, message: Optional[str]) -> str: + if message is None: + return error_code.name + else: + return f"{error_code.name}: {message}" diff --git a/python/semantic_kernel/diagnostics/validation_exception.py b/python/semantic_kernel/diagnostics/validation_exception.py new file mode 100644 index 000000000000..2a7a86889a5c --- /dev/null +++ b/python/semantic_kernel/diagnostics/validation_exception.py @@ -0,0 +1,50 @@ +# Copyright (c) Microsoft. All rights reserved. + +from enum import Enum +from typing import Optional + +from semantic_kernel.diagnostics.sk_exception import SKException + + +class ValidationException(SKException): + class ErrorCodes(Enum): + # Unknown error. + UnknownError = -1 + # Null value. + NullValue = 0 + # Empty value. + EmptyValue = 1 + # Out of range. + OutOfRange = 2 + # Missing prefix. + MissingPrefix = 3 + # Directory not found. + DirectoryNotFound = 4 + + # The error code. + _error_code: ErrorCodes + + def __init__( + self, + error_code: ErrorCodes, + message: str, + inner_exception: Optional[Exception] = None, + ) -> None: + """Initializes a new instance of the ValidationException class. + + Arguments: + error_code {ErrorCodes} -- The error code. + message {str} -- The error message. + inner_exception {Exception} -- The inner exception. + """ + super().__init__(error_code, message, inner_exception) + self._error_code = error_code + + @property + def error_code(self) -> ErrorCodes: + """Gets the error code. + + Returns: + ErrorCodes -- The error code. + """ + return self._error_code diff --git a/python/semantic_kernel/diagnostics/verify.py b/python/semantic_kernel/diagnostics/verify.py new file mode 100644 index 000000000000..219a535e1afb --- /dev/null +++ b/python/semantic_kernel/diagnostics/verify.py @@ -0,0 +1,105 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +import os +import re +from typing import Any, Optional + +from semantic_kernel.diagnostics.validation_exception import ValidationException +from semantic_kernel.kernel_exception import KernelException + + +class Verify: + @staticmethod + def not_null(value: Optional[Any], message: str) -> None: + if value is not None: + return + + raise ValidationException(ValidationException.ErrorCodes.NullValue, message) + + @staticmethod + def not_empty(value: Optional[str], message: str) -> None: + Verify.not_null(value, message) + if value.strip() != "": # type: ignore + return + + raise ValidationException(ValidationException.ErrorCodes.EmptyValue, message) + + @staticmethod + def valid_skill_name(value: Optional[str]) -> None: + Verify.not_empty(value, "The skill name cannot be empty") + + SKILL_NAME_REGEX = r"^[0-9A-Za-z_]*$" + if re.match(SKILL_NAME_REGEX, value): # type: ignore + return + + raise KernelException( + KernelException.ErrorCodes.InvalidFunctionDescription, + "A skill name can contain only latin letters, digits 0-9, " + f"and underscores: '{value}' is not a valid skill name.", + ) + + @staticmethod + def valid_function_name(value: Optional[str]) -> None: + Verify.not_empty(value, "The function name cannot be empty") + + FUNCTION_NAME_REGEX = r"^[0-9A-Za-z_]*$" + if re.match(FUNCTION_NAME_REGEX, value): # type: ignore + return + + raise KernelException( + KernelException.ErrorCodes.InvalidFunctionDescription, + "A function name can contain only latin letters, digits 0-9, " + f"and underscores: '{value}' is not a valid function name.", + ) + + @staticmethod + def valid_function_param_name(value: Optional[str]) -> None: + Verify.not_empty(value, "The function parameter name cannot be empty") + + FUNCTION_PARAM_NAME_REGEX = r"^[0-9A-Za-z_]*$" + if re.match(FUNCTION_PARAM_NAME_REGEX, value): # type: ignore + return + + raise KernelException( + KernelException.ErrorCodes.InvalidFunctionDescription, + "A function parameter name can contain only latin letters, " + 
f"digits 0-9, and underscores: '{value}' is not a valid " + f"function parameter name.", + ) + + @staticmethod + def starts_with(text: str, prefix: Optional[str], message: str) -> None: + Verify.not_empty(text, "The text to verify cannot be empty") + Verify.not_null(prefix, "The prefix to verify cannot be null") + + if text.startswith(prefix): # type: ignore + return + + raise ValidationException(ValidationException.ErrorCodes.MissingPrefix, message) + + @staticmethod + def directory_exists(path: str): + Verify.not_empty(path, "The path to verify cannot be empty") + + if os.path.isdir(path): + return + + raise ValidationException( + ValidationException.ErrorCodes.DirectoryNotFound, + f"Directory not found: '{path}'", + ) + + @staticmethod + def parameters_unique(parameters: list): # TODO: ParameterView + name_set = set() + + for parameter in parameters: + if parameter.name in name_set: + raise KernelException( + KernelException.ErrorCodes.InvalidFunctionDescription, + "The function has two or more parameters " + f"with the same name '{parameter.name}'", + ) + + Verify.not_empty(parameter.name, "The function parameter name is empty") + name_set.add(parameter.name.lower()) diff --git a/python/semantic_kernel/kernel.py b/python/semantic_kernel/kernel.py new file mode 100644 index 000000000000..6a2d772fb7c2 --- /dev/null +++ b/python/semantic_kernel/kernel.py @@ -0,0 +1,281 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from logging import Logger +from typing import Any, Dict, Optional + +from semantic_kernel.ai.ai_exception import AIException +from semantic_kernel.ai.complete_request_settings import CompleteRequestSettings +from semantic_kernel.ai.open_ai.services.azure_text_completion import ( + AzureTextCompletion, +) +from semantic_kernel.ai.open_ai.services.open_ai_text_completion import ( + OpenAITextCompletion, +) +from semantic_kernel.configuration.backend_types import BackendType +from semantic_kernel.configuration.kernel_config import KernelConfig +from semantic_kernel.diagnostics.verify import Verify +from semantic_kernel.kernel_base import KernelBase +from semantic_kernel.kernel_exception import KernelException +from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase +from semantic_kernel.orchestration.context_variables import ContextVariables +from semantic_kernel.orchestration.sk_context import SKContext +from semantic_kernel.orchestration.sk_function import SKFunction +from semantic_kernel.orchestration.sk_function_base import SKFunctionBase +from semantic_kernel.semantic_functions.semantic_function_config import ( + SemanticFunctionConfig, +) +from semantic_kernel.skill_definition.read_only_skill_collection_base import ( + ReadOnlySkillCollectionBase, +) +from semantic_kernel.skill_definition.skill_collection import SkillCollection +from semantic_kernel.skill_definition.skill_collection_base import SkillCollectionBase +from semantic_kernel.template_engine.prompt_template_engine_base import ( + PromptTemplateEngineBase, +) + + +class Kernel(KernelBase): + _log: Logger + _config: KernelConfig + _skill_collection: SkillCollectionBase + _prompt_template_engine: PromptTemplateEngineBase + _memory: SemanticTextMemoryBase + + def __init__( + self, + skill_collection: SkillCollectionBase, + prompt_template_engine: PromptTemplateEngineBase, + memory: SemanticTextMemoryBase, + config: KernelConfig, + log: Logger, + ) -> None: + self._log = log 
+ self._config = config + self._skill_collection = skill_collection + self._prompt_template_engine = prompt_template_engine + self._memory = memory + + @property + def config(self) -> KernelConfig: + return self._config + + @property + def logger(self) -> Logger: + return self._log + + @property + def memory(self) -> SemanticTextMemoryBase: + return self._memory + + @property + def prompt_template_engine(self) -> PromptTemplateEngineBase: + return self._prompt_template_engine + + @property + def skills(self) -> ReadOnlySkillCollectionBase: + return self._skill_collection.read_only_skill_collection + + def register_semantic_function( + self, + skill_name: Optional[str], + function_name: str, + function_config: SemanticFunctionConfig, + ) -> SKFunctionBase: + if skill_name is None or skill_name == "": + skill_name = SkillCollection.GLOBAL_SKILL + assert skill_name is not None # for type checker + + Verify.valid_skill_name(skill_name) + Verify.valid_function_name(function_name) + + function = self._create_semantic_function( + skill_name, function_name, function_config + ) + self._skill_collection.add_semantic_function(function) + + return function + + async def run_async(self, *functions: Any) -> SKContext: + return await self.run_on_vars_async(ContextVariables(), *functions) + + async def run_on_str_async(self, input_str: str, *functions: Any) -> SKContext: + return await self.run_on_vars_async(ContextVariables(input_str), *functions) + + async def run_on_vars_async( + self, input_vars: ContextVariables, *functions: Any + ) -> SKContext: + context = SKContext( + input_vars, + self._memory, + self._skill_collection.read_only_skill_collection, + self._log, + ) + + pipeline_step = 0 + for func in functions: + assert isinstance(func, SKFunctionBase), ( + "All func arguments to Kernel.run*(inputs, func1, func2, ...) " + "must be SKFunctionBase instances" + ) + + if context.error_occurred: + self._log.error( + f"Something went wrong in pipeline step {pipeline_step}. 
" + f"Error description: '{context.last_error_description}'" + ) + return context + + pipeline_step += 1 + + try: + context = await func.invoke_async(input=None, context=context) + + if context.error_occurred: + self._log.error( + f"Something went wrong in pipeline step {pipeline_step}. " + f"During function invocation: '{func.skill_name}.{func.name}'. " + f"Error description: '{context.last_error_description}'" + ) + return context + except Exception as ex: + self._log.error( + f"Something went wrong in pipeline step {pipeline_step}. " + f"During function invocation: '{func.skill_name}.{func.name}'. " + f"Error description: '{str(ex)}'" + ) + context.fail(str(ex), ex) + return context + + return context + + def func(self, skill_name: str, function_name: str) -> SKFunctionBase: + if self.skills.has_native_function(skill_name, function_name): + return self.skills.get_native_function(skill_name, function_name) + + return self.skills.get_semantic_function(skill_name, function_name) + + def register_memory(self, memory: SemanticTextMemoryBase) -> None: + self._memory = memory + + def create_new_context(self) -> SKContext: + return SKContext( + ContextVariables(), + self._memory, + self.skills, + self._log, + ) + + def import_skill( + self, skill_instance: Any, skill_name: str = "" + ) -> Dict[str, SKFunctionBase]: + if skill_name.strip() == "": + skill_name = SkillCollection.GLOBAL_SKILL + self._log.debug(f"Importing skill {skill_name} into the global namespace") + else: + self._log.debug(f"Importing skill {skill_name}") + + functions = [] + # Read every method from the skill instance + for candidate in skill_instance.__dict__.values(): + # We're looking for a @staticmethod + if not isinstance(candidate, staticmethod): + continue + candidate = candidate.__func__ + + # If the method is a semantic function, register it + if hasattr(candidate, "__sk_function_name__"): + functions.append( + SKFunction.from_native_method(candidate, skill_name, self.logger) + ) + + 
self.logger.debug(f"Methods imported: {len(functions)}") + + # Uniqueness check on function names + function_names = [f.name for f in functions] + if len(function_names) != len(set(function_names)): + raise KernelException( + KernelException.ErrorCodes.FunctionOverloadNotSupported, + "Overloaded functions are not supported, " + "please differentiate function names.", + ) + + skill = {} + for function in functions: + function.set_default_skill_collection(self.skills) + self._skill_collection.add_native_function(function) + skill[function.name] = function + + return skill + + def _create_semantic_function( + self, + skill_name: str, + function_name: str, + function_config: SemanticFunctionConfig, + ) -> SKFunctionBase: + function_type = function_config.prompt_template_config.type + if not function_type == "completion": + raise AIException( + AIException.ErrorCodes.FunctionTypeNotSupported, + f"Function type not supported: {function_type}", + ) + + function = SKFunction.from_semantic_config( + skill_name, function_name, function_config + ) + function.request_settings.update_from_completion_config( + function_config.prompt_template_config.completion + ) + + # Connect the function to the current kernel skill + # collection, in case the function is invoked manually + # without a context and without a way to find other functions. 
+ function.set_default_skill_collection(self.skills) + + # TODO: allow to postpone this (use lazy init) + # allow to create semantic functions without + # a default backend + backend = self._config.get_completion_backend( + function_config.prompt_template_config.default_backends[0] + if len(function_config.prompt_template_config.default_backends) > 0 + else None + ) + + function.set_ai_configuration( + CompleteRequestSettings.from_completion_config( + function_config.prompt_template_config.completion + ) + ) + + if backend.backend_type == BackendType.AzureOpenAI: + Verify.not_null( + backend.azure_open_ai, "Azure OpenAI configuration is missing" + ) + function.set_ai_backend( + lambda: AzureTextCompletion( + backend.azure_open_ai.deployment_name, # type: ignore + backend.azure_open_ai.endpoint, # type: ignore + backend.azure_open_ai.api_key, # type: ignore + backend.azure_open_ai.api_version, # type: ignore + self._log, + ) + ) + elif backend.backend_type == BackendType.OpenAI: + Verify.not_null(backend.open_ai, "OpenAI configuration is missing") + function.set_ai_backend( + lambda: OpenAITextCompletion( + backend.open_ai.model_id, # type: ignore + backend.open_ai.api_key, # type: ignore + backend.open_ai.org_id, # type: ignore + self._log, + ) + ) + else: + raise AIException( + AIException.ErrorCodes.InvalidConfiguration, + f"Unknown/unsupported backend type: {backend.backend_type.name}, " + f"unable to prepare semantic function. Function description: " + f"{function_config.prompt_template_config.description}", + ) + + return function diff --git a/python/semantic_kernel/kernel_base.py b/python/semantic_kernel/kernel_base.py new file mode 100644 index 000000000000..2cdbbd0eb089 --- /dev/null +++ b/python/semantic_kernel/kernel_base.py @@ -0,0 +1,88 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from abc import ABC, abstractmethod +from logging import Logger +from typing import Any, Dict, Optional + +from semantic_kernel.configuration.kernel_config import KernelConfig +from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase +from semantic_kernel.orchestration.context_variables import ContextVariables +from semantic_kernel.orchestration.sk_context import SKContext +from semantic_kernel.orchestration.sk_function_base import SKFunctionBase +from semantic_kernel.semantic_functions.semantic_function_config import ( + SemanticFunctionConfig, +) +from semantic_kernel.skill_definition.read_only_skill_collection_base import ( + ReadOnlySkillCollectionBase, +) +from semantic_kernel.template_engine.prompt_template_engine_base import ( + PromptTemplateEngineBase, +) + + +class KernelBase(ABC): + @property + @abstractmethod + def config(self) -> KernelConfig: + pass + + @property + @abstractmethod + def logger(self) -> Logger: + pass + + @property + @abstractmethod + def memory(self) -> SemanticTextMemoryBase: + pass + + @property + @abstractmethod + def prompt_template_engine(self) -> PromptTemplateEngineBase: + pass + + @property + @abstractmethod + def skills(self) -> ReadOnlySkillCollectionBase: + pass + + @abstractmethod + def register_semantic_function( + self, + skill_name: Optional[str], + function_name: str, + function_config: SemanticFunctionConfig, + ) -> SKFunctionBase: + pass + + @abstractmethod + def import_skill( + self, skill_instance: Any, skill_name: str = "" + ) -> Dict[str, SKFunctionBase]: + pass + + @abstractmethod + def register_memory(self, memory: SemanticTextMemoryBase) -> None: + pass + + @abstractmethod + async def run_on_str_async(self, input_str: str, *args: Any) -> SKContext: + pass + + @abstractmethod + async def run_on_vars_async( + self, input_vars: ContextVariables, *args: Any + ) -> SKContext: + pass + + @abstractmethod + async def run_async(self, *args: Any) -> SKContext: + pass + + @abstractmethod + 
def func(self, skill_name: str, function_name: str) -> SKFunctionBase: + pass + + @abstractmethod + def create_new_context(self) -> SKContext: + pass diff --git a/python/semantic_kernel/kernel_builder.py b/python/semantic_kernel/kernel_builder.py new file mode 100644 index 000000000000..37718217a19d --- /dev/null +++ b/python/semantic_kernel/kernel_builder.py @@ -0,0 +1,90 @@ +# Copyright (c) Microsoft. All rights reserved. + +from logging import Logger +from typing import Callable, Optional + +from semantic_kernel.configuration.kernel_config import KernelConfig +from semantic_kernel.diagnostics.verify import Verify +from semantic_kernel.kernel import Kernel +from semantic_kernel.kernel_base import KernelBase +from semantic_kernel.kernel_extensions import KernelExtensions +from semantic_kernel.memory.memory_store_base import MemoryStoreBase +from semantic_kernel.memory.null_memory import NullMemory +from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase +from semantic_kernel.skill_definition.skill_collection import SkillCollection +from semantic_kernel.template_engine.prompt_template_engine import PromptTemplateEngine +from semantic_kernel.utils.null_logger import NullLogger + + +class KernelBuilder: + _config: KernelConfig + _memory: SemanticTextMemoryBase + _memory_storage: Optional[MemoryStoreBase] + _log: Logger + + def __init__( + self, config: KernelConfig, memory: SemanticTextMemoryBase, log: Logger + ) -> None: + self._config = config + self._memory = memory + self._memory_storage = None + self._log = log + + def with_configuration(self, config: KernelConfig) -> "KernelBuilder": + Verify.not_null(config, "The configuration instance provided is None") + self._config = config + return self + + def with_memory(self, memory: SemanticTextMemoryBase) -> "KernelBuilder": + Verify.not_null(memory, "The memory instance provided is None") + self._memory = memory + return self + + def with_memory_storage(self, storage: MemoryStoreBase) -> 
"KernelBuilder": + Verify.not_null(storage, "The memory storage instance provided is None") + self._memory_storage = storage + return self + + def with_logger(self, log: Logger) -> "KernelBuilder": + Verify.not_null(log, "The logger instance provided is None") + self._log = log + return self + + def configure( + self, config_func: Callable[[KernelConfig], KernelConfig] + ) -> "KernelBuilder": + self._config = config_func(self._config) + return self + + def build(self) -> KernelBase: + instance = Kernel( + SkillCollection(self._log), + PromptTemplateEngine(self._log), + self._memory, + self._config, + self._log, + ) + + if self._memory_storage is not None: + KernelExtensions.use_memory(instance, self._memory_storage) + + return instance + + @staticmethod + def create_kernel( + config: Optional[KernelConfig] = None, + log: Optional[Logger] = None, + memory: Optional[SemanticTextMemoryBase] = None, + ) -> KernelBase: + builder = KernelBuilder(KernelConfig(), NullMemory(), NullLogger()) + + if config is not None: + builder = builder.with_configuration(config) + + if log is not None: + builder = builder.with_logger(log) + + if memory is not None: + builder = builder.with_memory(memory) + + return builder.build() diff --git a/python/semantic_kernel/kernel_exception.py b/python/semantic_kernel/kernel_exception.py new file mode 100644 index 000000000000..7c73a1a74552 --- /dev/null +++ b/python/semantic_kernel/kernel_exception.py @@ -0,0 +1,58 @@ +# Copyright (c) Microsoft. All rights reserved. + +from enum import Enum +from typing import Optional + +from semantic_kernel.diagnostics.sk_exception import SKException + + +class KernelException(SKException): + class ErrorCodes(Enum): + # Unknown error. + UnknownError = -1 + # Invalid function description. + InvalidFunctionDescription = 0 + # Function overload not supported. + FunctionOverloadNotSupported = 1 + # Function not available. + FunctionNotAvailable = 2 + # Function type not supported. 
+ FunctionTypeNotSupported = 3 + # Invalid function type. + InvalidFunctionType = 4 + # Invalid backend configuration. + InvalidBackendConfiguration = 5 + # Backend not found. + BackendNotFound = 6 + # Skill collection not set. + SkillCollectionNotSet = 7 + # Ambiguous implementation. + AmbiguousImplementation = 8 + + # The error code. + _error_code: ErrorCodes + + def __init__( + self, + error_code: ErrorCodes, + message: str, + inner_exception: Optional[Exception] = None, + ) -> None: + """Initializes a new instance of the KernelError class. + + Arguments: + error_code {ErrorCodes} -- The error code. + message {str} -- The error message. + inner_exception {Exception} -- The inner exception. + """ + super().__init__(error_code, message, inner_exception) + self._error_code = error_code + + @property + def error_code(self) -> ErrorCodes: + """Gets the error code. + + Returns: + ErrorCodes -- The error code. + """ + return self._error_code diff --git a/python/semantic_kernel/kernel_extensions/__init__.py b/python/semantic_kernel/kernel_extensions/__init__.py new file mode 100644 index 000000000000..d793a26e6202 --- /dev/null +++ b/python/semantic_kernel/kernel_extensions/__init__.py @@ -0,0 +1,54 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from typing import TYPE_CHECKING, List, Optional + +from semantic_kernel.ai.embeddings.embedding_generator_base import ( + EmbeddingGeneratorBase, +) +from semantic_kernel.kernel_base import KernelBase +from semantic_kernel.kernel_extensions.inline_function_definitions import ( + create_semantic_function, +) +from semantic_kernel.kernel_extensions.memory_configuration import use_memory +from semantic_kernel.memory.memory_store_base import MemoryStoreBase + +if TYPE_CHECKING: + from semantic_kernel.orchestration.sk_function_base import SKFunctionBase + + +class KernelExtensions: + @staticmethod + def create_semantic_function( + kernel: KernelBase, + prompt_template: str, + function_name: Optional[str] = None, + skill_name: Optional[str] = None, + description: Optional[str] = None, + max_tokens: int = 256, + temperature: float = 0.0, + top_p: float = 1.0, + presence_penalty: float = 0.0, + frequency_penalty: float = 0.0, + stop_sequences: Optional[List[str]] = None, + ) -> "SKFunctionBase": + return create_semantic_function( + kernel, + prompt_template, + function_name, + skill_name, + description, + max_tokens, + temperature, + top_p, + presence_penalty, + frequency_penalty, + stop_sequences, + ) + + @staticmethod + def use_memory( + kernel: KernelBase, + storage: MemoryStoreBase, + embeddings_generator: Optional[EmbeddingGeneratorBase] = None, + ) -> None: + use_memory(kernel, storage, embeddings_generator) diff --git a/python/semantic_kernel/kernel_extensions/import_semantic_skill_from_directory.py b/python/semantic_kernel/kernel_extensions/import_semantic_skill_from_directory.py new file mode 100644 index 000000000000..dfd6d3c12cb1 --- /dev/null +++ b/python/semantic_kernel/kernel_extensions/import_semantic_skill_from_directory.py @@ -0,0 +1,60 @@ +# Copyright (c) Microsoft. All rights reserved. 
def import_semantic_skill_from_directory(
    kernel: KernelBase, parent_directory: str, skill_directory_name: str
) -> Dict[str, SKFunctionBase]:
    """Load every semantic function found under a skill directory.

    Each immediate sub-directory of the skill directory that contains an
    ``skprompt.txt`` file is registered as one semantic function. An optional
    ``config.json`` next to the prompt customizes the function configuration.

    Arguments:
        kernel {KernelBase} -- The kernel to register the functions on.
        parent_directory {str} -- Directory containing the skill directory.
        skill_directory_name {str} -- Name of the skill directory; also used
            as the skill name at registration time.

    Returns:
        Dict[str, SKFunctionBase] -- Map of function name to registered function.
    """
    CONFIG_FILE = "config.json"
    PROMPT_FILE = "skprompt.txt"

    Verify.valid_skill_name(skill_directory_name)
    skill_directory = os.path.abspath(
        os.path.join(parent_directory, skill_directory_name)
    )
    Verify.directory_exists(skill_directory)

    skill = {}

    for directory in glob.glob(os.path.join(skill_directory, "*/")):
        function_name = os.path.basename(os.path.dirname(directory))
        prompt_path = os.path.join(directory, PROMPT_FILE)

        # Continue only if the prompt template exists
        if not os.path.exists(prompt_path):
            continue

        # Bug fix: config.json is optional. Previously it was opened
        # unconditionally, so a skill folder with only skprompt.txt raised
        # FileNotFoundError; now the default configuration is used instead.
        config = PromptTemplateConfig()
        config_path = os.path.join(directory, CONFIG_FILE)
        if os.path.exists(config_path):
            with open(config_path, "r") as config_file:
                config.from_json(config_file.read())

        # Load the prompt template
        with open(prompt_path, "r") as prompt_file:
            template = PromptTemplate(
                prompt_file.read(), kernel.prompt_template_engine, config
            )

        function_config = SemanticFunctionConfig(config, template)
        skill[function_name] = kernel.register_semantic_function(
            skill_directory_name, function_name, function_config
        )

    return skill
def create_semantic_function(
    kernel: KernelBase,
    prompt_template: str,
    function_name: Optional[str] = None,
    skill_name: Optional[str] = None,
    description: Optional[str] = None,
    max_tokens: int = 256,
    temperature: float = 0.0,
    top_p: float = 1.0,
    presence_penalty: float = 0.0,
    frequency_penalty: float = 0.0,
    stop_sequences: Optional[List[str]] = None,
) -> "SKFunctionBase":
    """Build and register a semantic function from an inline prompt template.

    A unique function name is generated when none is supplied, and a generic
    description is used when none is provided.

    Returns:
        SKFunctionBase -- The function registered on the kernel.
    """
    if function_name is None:
        # Random yet valid identifier; dashes are replaced since they are
        # not allowed in function names.
        function_name = f"f_{str(uuid4()).replace('-', '_')}"
    if description is None:
        description = "Generic function, unknown purpose"

    completion_config = PromptTemplateConfig.CompletionConfig(
        temperature,
        top_p,
        presence_penalty,
        frequency_penalty,
        max_tokens,
        [] if stop_sequences is None else stop_sequences,
    )
    config = PromptTemplateConfig(
        description=description,
        type="completion",
        completion=completion_config,
    )

    Verify.valid_function_name(function_name)
    if skill_name is not None:
        Verify.valid_skill_name(skill_name)

    template = PromptTemplate(prompt_template, kernel.prompt_template_engine, config)
    return kernel.register_semantic_function(
        skill_name, function_name, SemanticFunctionConfig(config, template)
    )
class MemoryQueryResult:
    """A single memory search hit: a record's public fields plus the
    relevance score of that record for the query."""

    # Whether the underlying record points at externally stored content
    is_reference: bool
    # Name of the external source, when is_reference is True
    external_source_name: Optional[str]
    # Unique identifier of the record
    id: str
    description: Optional[str]
    text: Optional[str]
    # Similarity score of this record for the query
    relevance: float

    def __init__(
        self,
        is_reference: bool,
        external_source_name: Optional[str],
        id: str,
        description: Optional[str],
        text: Optional[str],
        relevance: float,
    ) -> None:
        """Store the supplied field values on the instance."""
        self.is_reference = is_reference
        self.external_source_name = external_source_name
        self.id = id
        self.description = description
        self.text = text
        self.relevance = relevance

    @staticmethod
    def from_memory_record(
        record: MemoryRecord, relevance: float
    ) -> "MemoryQueryResult":
        """Copy the public fields of *record* into a new query result,
        attaching the given relevance score."""
        return MemoryQueryResult(
            record.is_reference,
            record.external_source_name,
            record.id,
            record.description,
            record.text,
            relevance,
        )
class MemoryRecord:
    """A unit of semantic memory: an id, optional text and description, an
    embedding vector, and a flag distinguishing external-reference records
    from records whose text is stored inline."""

    is_reference: bool
    external_source_name: Optional[str]
    id: str
    description: Optional[str]
    text: Optional[str]
    # Embedding vector; exposed read-only via the `embedding` property
    _embedding: ndarray

    def __init__(
        self,
        is_reference: bool,
        external_source_name: Optional[str],
        id: str,
        description: Optional[str],
        text: Optional[str],
        embedding: ndarray,
    ) -> None:
        """Initialize a record; prefer the reference_record/local_record
        factories over calling this directly."""
        self.is_reference = is_reference
        self.external_source_name = external_source_name
        self.id = id
        self.description = description
        self.text = text
        self._embedding = embedding

    @property
    def embedding(self) -> ndarray:
        """The record's embedding vector (read-only)."""
        return self._embedding

    @staticmethod
    def reference_record(
        external_id: str,
        source_name: str,
        description: Optional[str],
        embedding: ndarray,
    ) -> "MemoryRecord":
        """Build a record that points at content stored externally
        (is_reference=True, no inline text)."""
        return MemoryRecord(
            True,
            source_name,
            external_id,
            description,
            None,
            embedding,
        )

    @staticmethod
    def local_record(
        id: str, text: str, description: Optional[str], embedding: ndarray
    ) -> "MemoryRecord":
        """Build a record whose text is stored inline
        (is_reference=False, no external source)."""
        return MemoryRecord(
            False,
            None,
            id,
            description,
            text,
            embedding,
        )
class NullMemory(SemanticTextMemoryBase):
    """No-op semantic memory: saves are ignored and every query comes back
    empty. Useful as a stand-in when no real memory has been configured."""

    async def save_information_async(
        self, collection: str, text: str, id: str, description: Optional[str] = None
    ) -> None:
        """Discard the information; nothing is stored."""
        return None

    async def save_reference_async(
        self,
        collection: str,
        text: str,
        external_id: str,
        external_source_name: str,
        description: Optional[str] = None,
    ) -> None:
        """Discard the reference; nothing is stored."""
        return None

    async def get_async(
        self, collection: str, query: str
    ) -> Optional[MemoryQueryResult]:
        """Always report that nothing was found."""
        return None

    async def search_async(
        self,
        collection: str,
        query: str,
        limit: int = 1,
        min_relevance_score: float = 0.7,
    ) -> List[MemoryQueryResult]:
        """Always return an empty result set."""
        return []

    async def get_collections_async(self) -> List[str]:
        """Always report that no collections exist."""
        return []


# Shared singleton instance, attached after the class is defined.
NullMemory.instance = NullMemory()  # type: ignore
class SemanticTextMemory(SemanticTextMemoryBase):
    """Semantic memory backed by a data store plus an embedding generator.

    Texts are embedded when saved and nearest-neighbor searched on query.
    """

    _storage: MemoryStoreBase
    _embeddings_generator: EmbeddingGeneratorBase

    def __init__(
        self, storage: MemoryStoreBase, embeddings_generator: EmbeddingGeneratorBase
    ) -> None:
        """Initialize with the backing store and the embedding generator."""
        self._storage = storage
        self._embeddings_generator = embeddings_generator

    async def save_information_async(
        self,
        collection: str,
        text: str,
        id: str,
        description: Optional[str] = None,
    ) -> None:
        """Embed *text* and persist it as a local record under *id*."""
        embedding = await self._embeddings_generator.generate_embeddings_async([text])
        data = MemoryRecord.local_record(id, text, description, embedding)

        await self._storage.put_value_async(collection, id, data)

    async def save_reference_async(
        self,
        collection: str,
        text: str,
        external_id: str,
        external_source_name: str,
        description: Optional[str] = None,
    ) -> None:
        """Embed *text* and persist a reference record pointing at the
        external source.

        Bug fix: the record was previously constructed and then discarded —
        it was never written to storage, so saved references could never be
        found again. It is now persisted under *external_id*, mirroring
        save_information_async.
        """
        embedding = await self._embeddings_generator.generate_embeddings_async([text])
        data = MemoryRecord.reference_record(
            external_id, external_source_name, description, embedding
        )

        await self._storage.put_value_async(collection, external_id, data)

    async def get_async(
        self,
        collection: str,
        query: str,
    ) -> Optional[MemoryQueryResult]:
        """Not implemented yet."""
        raise NotImplementedError()

    async def search_async(
        self,
        collection: str,
        query: str,
        limit: int = 1,
        min_relevance_score: float = 0.7,
    ) -> List[MemoryQueryResult]:
        """Embed *query* and return up to *limit* nearest records scoring at
        least *min_relevance_score*."""
        query_embedding = await self._embeddings_generator.generate_embeddings_async(
            [query]
        )
        results = await self._storage.get_nearest_matches_async(
            collection, query_embedding, limit, min_relevance_score
        )

        return [MemoryQueryResult.from_memory_record(r[0], r[1]) for r in results]

    async def get_collections_async(self) -> List[str]:
        """Not implemented yet."""
        raise NotImplementedError()
class DataEntry:
    """A stored MemoryRecord together with its key and a timestamp."""

    _key: str
    _value: MemoryRecord
    _timestamp: datetime

    def __init__(self, key: str, value: MemoryRecord, timestamp: datetime) -> None:
        """Capture the key, payload and timestamp for this entry."""
        self._key = key
        self._value = value
        self._timestamp = timestamp

    @property
    def key(self) -> str:
        """The entry's key (read-only)."""
        return self._key

    @property
    def value(self) -> MemoryRecord:
        """The stored record."""
        return self._value

    @value.setter
    def value(self, value: MemoryRecord) -> None:
        self._value = value

    @property
    def timestamp(self) -> datetime:
        """The timestamp associated with this entry."""
        return self._timestamp

    @timestamp.setter
    def timestamp(self, timestamp: datetime) -> None:
        self._timestamp = timestamp
class DataStoreBase(ABC):
    """Abstract key/value store organized into named collections of
    DataEntry items."""

    @abstractmethod
    async def get_collections_async(self) -> List[str]:
        """Return the names of all collections in the store."""
        ...

    @abstractmethod
    async def get_all_async(self, collection: str) -> List[Any]:
        """Return every entry in *collection*."""
        ...

    @abstractmethod
    async def get_async(self, collection: str, key: str) -> Optional[DataEntry]:
        """Return the entry stored under *key*, or None when absent."""
        ...

    @abstractmethod
    async def put_async(self, collection: str, value: Any) -> DataEntry:
        """Insert or overwrite an entry and return it."""
        ...

    @abstractmethod
    async def remove_async(self, collection: str, key: str) -> None:
        """Remove the entry stored under *key*, if any."""
        ...

    @abstractmethod
    async def get_value_async(self, collection: str, key: str) -> Any:
        """Return the payload (not the wrapping entry) stored under *key*."""
        ...

    @abstractmethod
    async def put_value_async(self, collection: str, key: str, value: Any) -> None:
        """Store a payload under *key*, wrapping it in an entry."""
        ...
class VolatileDataStore(DataStoreBase):
    """In-memory, process-local data store: collection name -> key -> entry.

    Nothing is persisted; all data is lost when the process exits.
    """

    _store: Dict[str, Dict[str, DataEntry]]

    def __init__(self) -> None:
        self._store = {}

    async def get_collections_async(self) -> List[str]:
        """Return the names of all known collections."""
        return list(self._store.keys())

    async def get_all_async(self, collection: str) -> List[DataEntry]:
        """Return every entry in *collection*; empty list if unknown."""
        if collection not in self._store:
            return []

        return list(self._store[collection].values())

    async def get_async(self, collection: str, key: str) -> Optional[DataEntry]:
        """Return the entry for *key*, or None when the collection or key
        does not exist."""
        collection_store = self._store.get(collection)
        if collection_store is None:
            return None
        return collection_store.get(key)

    async def put_async(self, collection: str, value: DataEntry) -> DataEntry:
        """Insert or overwrite *value* under its own key, creating the
        collection on first use; returns the entry."""
        self._store.setdefault(collection, {})[value.key] = value
        return value

    async def remove_async(self, collection: str, key: str) -> None:
        """Delete the entry if present; missing keys are silently ignored."""
        collection_store = self._store.get(collection)
        if collection_store is not None:
            collection_store.pop(key, None)

    async def get_value_async(self, collection: str, key: str) -> MemoryRecord:
        """Return the stored record for *key*.

        Raises:
            KeyError -- when the key (or collection) does not exist.
                Previously a bare Exception was raised; KeyError is the
                idiomatic lookup error and remains backward compatible with
                callers catching Exception.
        """
        entry = await self.get_async(collection, key)

        if entry is None:
            raise KeyError(f"Key '{key}' not found in collection '{collection}'")

        return entry.value

    async def put_value_async(
        self, collection: str, key: str, value: MemoryRecord
    ) -> None:
        """Store *value* under *key*, stamping the current local time."""
        await self.put_async(collection, DataEntry(key, value, datetime.now()))
class ContextVariables:
    """A case-insensitive string-to-string variable bag with a designated
    "input" variable that carries a context's default payload.

    Bug fix: ``_variables`` used to be a *class-level* mutable default, so
    every instance shared one dict and each ``ContextVariables(content)``
    call clobbered the "input" of all existing instances. The dict is now
    created per instance in ``__init__``.
    """

    _main_key = "input"

    def __init__(self, content: str = "") -> None:
        """Create an instance-local variable dict seeded with *content* as
        the main "input" variable."""
        self._variables: Dict[str, str] = {self._main_key: content}

    @property
    def input(self) -> str:
        """The main "input" variable."""
        return self._variables[self._main_key]

    def update(self, content: str) -> "ContextVariables":
        """Replace the main "input" variable; returns self for chaining."""
        self._variables[self._main_key] = content
        return self

    def merge_or_overwrite(
        self, new_context: "ContextVariables", merge: bool = True
    ) -> "ContextVariables":
        """Copy variables in from *new_context*; when merge is False the
        current variables are discarded first. Returns self."""
        if not merge:
            self._variables = {}

        self._variables.update(new_context._variables)
        return self

    def set(self, name: str, value: str) -> "ContextVariables":
        """Set a variable, or delete it when *value* is None. Names are
        case-insensitive. Returns self."""
        Verify.not_empty(name, "The variable name is empty")
        name = name.lower()

        if value is not None:
            self._variables[name] = value
        else:
            self._variables.pop(name, None)

        return self

    def get(self, name: str) -> Tuple[bool, str]:
        """Return (True, value) when the variable exists, else (False, "")."""
        name = name.lower()
        if name in self._variables:
            return True, self._variables[name]

        return False, ""

    def __getitem__(self, name: str) -> str:
        """Indexing access; raises KeyError for unknown names."""
        return self._variables[name.lower()]

    def __setitem__(self, name: str, value: str) -> None:
        """Indexing assignment; the name must be non-empty."""
        Verify.not_empty(name, "The variable name is empty")
        self._variables[name.lower()] = value

    def contains_key(self, name: str) -> bool:
        """Whether a variable with this (case-insensitive) name exists."""
        return name.lower() in self._variables

    def __str__(self) -> str:
        """String form is the main "input" variable."""
        return self._variables[self._main_key]

    def clone(self) -> "ContextVariables":
        """Return a copy with an independent (shallow-copied) variable dict."""
        new_vars = ContextVariables()
        new_vars._variables = self._variables.copy()
        return new_vars
def handle_context_switch_in_sk_context_out_task_sk_context( + function, context + ): + # Note: Context Switching: allows the function to replace with a + # new context, e.g. to branch execution path + context = await function(context) + return context + + @staticmethod + @_handles(DelegateTypes.InString) + async def handle_in_string(function, context): + function(context.variables.input) + return context + + @staticmethod + @_handles(DelegateTypes.InStringOutString) + async def handle_in_string_out_string(function, context): + context.variables.update(function(context.variables.input)) + return context + + @staticmethod + @_handles(DelegateTypes.InStringOutTaskString) + async def handle_in_string_out_task_string(function, context): + context.variables.update(await function(context.variables.input)) + return context + + @staticmethod + @_handles(DelegateTypes.InStringAndContext) + async def handle_in_string_and_context(function, context): + function(context.variables.input, context) + return context + + @staticmethod + @_handles(DelegateTypes.InStringAndContextOutString) + async def handle_in_string_and_context_out_string(function, context): + context.variables.update(function(context.variables.input, context)) + return context + + @staticmethod + @_handles(DelegateTypes.InStringAndContextOutTaskString) + async def handle_in_string_and_context_out_task_string(function, context): + context.variables.update(await function(context.variables.input, context)) + return context + + @staticmethod + @_handles(DelegateTypes.ContextSwitchInStringAndContextOutTaskContext) + async def handle_context_switch_in_string_and_context_out_task_context( + function, context + ): + # Note: Context Switching: allows the function to replace with a + # new context, e.g. 
to branch execution path + context = await function(context.variables.input, context) + return context + + @staticmethod + @_handles(DelegateTypes.InStringOutTask) + async def handle_in_string_out_task(function, context): + await function(context.variables.input) + return context + + @staticmethod + @_handles(DelegateTypes.InContextOutTask) + async def handle_in_context_out_task(function, context): + await function(context) + return context + + @staticmethod + @_handles(DelegateTypes.InStringAndContextOutTask) + async def handle_in_string_and_context_out_task(function, context): + await function(context.variables.input, context) + return context + + @staticmethod + @_handles(DelegateTypes.OutTask) + async def handle_out_task(function, context): + await function() + return context + + @staticmethod + @_handles(DelegateTypes.Unknown) + async def handle_unknown(function, context): + raise KernelException( + KernelException.ErrorCodes.FunctionTypeNotSupported, + "Invalid function type detected, unable to execute.", + ) + + @staticmethod + def get_handler(delegate_type): + for name, value in DelegateHandlers.__dict__.items(): + if name.startswith("handle_") and hasattr( + value.__wrapped__, "_delegate_type" + ): + if value.__wrapped__._delegate_type == delegate_type: + return value + + return DelegateHandlers.handle_unknown diff --git a/python/semantic_kernel/orchestration/delegate_inference.py b/python/semantic_kernel/orchestration/delegate_inference.py new file mode 100644 index 000000000000..246bc4672a37 --- /dev/null +++ b/python/semantic_kernel/orchestration/delegate_inference.py @@ -0,0 +1,246 @@ +# Copyright (c) Microsoft. All rights reserved. 
def _has_first_param_with_type(
    signature: Signature, annotation, only: bool = True
) -> bool:
    """Whether the signature's first parameter is annotated with *annotation*.

    When *only* is True (default), the signature must also have exactly one
    parameter; otherwise additional parameters are allowed.
    """
    params = list(signature.parameters.values())
    if not params:
        return False
    if only and len(params) != 1:
        return False
    return params[0].annotation is annotation
infer_out_string(signature: Signature, awaitable: bool) -> bool: + matches = _has_no_params(signature) + matches = matches and _return_is_str(signature) + matches = matches and not awaitable + return matches + + @staticmethod + @_infers(DelegateTypes.OutTaskString) + def infer_out_task_string(signature: Signature, awaitable: bool) -> bool: + matches = _has_no_params(signature) + matches = matches and _return_is_str(signature) + matches = matches and awaitable + return matches + + @staticmethod + @_infers(DelegateTypes.InSKContext) + def infer_in_sk_context(signature: Signature, awaitable: bool) -> bool: + matches = _first_param_is_context(signature) + matches = matches and _no_return(signature) + matches = matches and not awaitable + return matches + + @staticmethod + @_infers(DelegateTypes.InSKContextOutString) + def infer_in_sk_context_out_string(signature: Signature, awaitable: bool) -> bool: + matches = _first_param_is_context(signature) + matches = matches and _return_is_str(signature) + matches = matches and not awaitable + return matches + + @staticmethod + @_infers(DelegateTypes.InSKContextOutTaskString) + def infer_in_sk_context_out_task_string( + signature: Signature, awaitable: bool + ) -> bool: + matches = _first_param_is_context(signature) + matches = matches and _return_is_str(signature) + matches = matches and awaitable + return matches + + @staticmethod + @_infers(DelegateTypes.ContextSwitchInSKContextOutTaskSKContext) + def infer_context_switch_in_sk_context_out_task_sk_context( + signature: Signature, awaitable: bool + ) -> bool: + matches = _first_param_is_context(signature) + matches = matches and _return_is_context(signature) + matches = matches and awaitable + return matches + + @staticmethod + @_infers(DelegateTypes.InString) + def infer_in_string(signature: Signature, awaitable: bool) -> bool: + matches = _first_param_is_str(signature) + matches = matches and _no_return(signature) + matches = matches and not awaitable + return matches + + 
@staticmethod + @_infers(DelegateTypes.InStringOutString) + def infer_in_string_out_string(signature: Signature, awaitable: bool) -> bool: + matches = _first_param_is_str(signature) + matches = matches and _return_is_str(signature) + matches = matches and not awaitable + return matches + + @staticmethod + @_infers(DelegateTypes.InStringOutTaskString) + def infer_in_string_out_task_string(signature: Signature, awaitable: bool) -> bool: + matches = _first_param_is_str(signature) + matches = matches and _return_is_str(signature) + matches = matches and awaitable + return matches + + @staticmethod + @_infers(DelegateTypes.InStringAndContext) + def infer_in_string_and_context(signature: Signature, awaitable: bool) -> bool: + matches = _first_param_is_str(signature) + matches = matches and _first_param_is_context(signature) + matches = matches and _no_return(signature) + matches = matches and not awaitable + return matches + + @staticmethod + @_infers(DelegateTypes.InStringAndContextOutString) + def infer_in_string_and_context_out_string( + signature: Signature, awaitable: bool + ) -> bool: + matches = _first_param_is_str(signature, only=False) + matches = matches and _has_two_params_second_is_context(signature) + matches = matches and _return_is_str(signature) + matches = matches and not awaitable + return matches + + @staticmethod + @_infers(DelegateTypes.InStringAndContextOutTaskString) + def infer_in_string_and_context_out_task_string( + signature: Signature, awaitable: bool + ) -> bool: + matches = _first_param_is_str(signature, only=False) + matches = matches and _has_two_params_second_is_context(signature) + matches = matches and _return_is_str(signature) + matches = matches and awaitable + return matches + + @staticmethod + @_infers(DelegateTypes.ContextSwitchInStringAndContextOutTaskContext) + def infer_context_switch_in_string_and_context_out_task_context( + signature: Signature, awaitable: bool + ) -> bool: + matches = _first_param_is_str(signature, 
only=False) + matches = matches and _has_two_params_second_is_context(signature) + matches = matches and _return_is_context(signature) + matches = matches and awaitable + return matches + + @staticmethod + @_infers(DelegateTypes.InStringOutTask) + def infer_in_string_out_task(signature: Signature, awaitable: bool) -> bool: + matches = _first_param_is_str(signature) + matches = matches and _no_return(signature) + matches = matches and awaitable + return matches + + @staticmethod + @_infers(DelegateTypes.InContextOutTask) + def infer_in_context_out_task(signature: Signature, awaitable: bool) -> bool: + matches = _first_param_is_context(signature) + matches = matches and _no_return(signature) + matches = matches and awaitable + return matches + + @staticmethod + @_infers(DelegateTypes.InStringAndContextOutTask) + def infer_in_string_and_context_out_task( + signature: Signature, awaitable: bool + ) -> bool: + matches = _first_param_is_str(signature, only=False) + matches = matches and _has_two_params_second_is_context(signature) + matches = matches and _no_return(signature) + matches = matches and awaitable + return matches + + @staticmethod + @_infers(DelegateTypes.OutTask) + def infer_out_task(signature: Signature, awaitable: bool) -> bool: + matches = _has_no_params(signature) + matches = matches and _no_return(signature) + matches = matches and awaitable + return matches + + @staticmethod + @_infers(DelegateTypes.Unknown) + def infer_unknown(signature: Signature) -> NoReturn: + raise KernelException( + KernelException.ErrorCodes.FunctionTypeNotSupported, + "Invalid function type detected, unable to infer DelegateType.", + ) + + @staticmethod + def infer_delegate_type(function) -> DelegateTypes: + # Get the function signature + function_signature = signature(function) + awaitable = iscoroutinefunction(function) + + for name, value in DelegateInference.__dict__.items(): + if name.startswith("infer_") and hasattr( + value.__wrapped__, "_delegate_type" + ): + # Get the 
delegate type + if value.__wrapped__(function_signature, awaitable): + return value.__wrapped__._delegate_type + + return DelegateTypes.Unknown diff --git a/python/semantic_kernel/orchestration/delegate_types.py b/python/semantic_kernel/orchestration/delegate_types.py new file mode 100644 index 000000000000..367062ca71e4 --- /dev/null +++ b/python/semantic_kernel/orchestration/delegate_types.py @@ -0,0 +1,25 @@ +# Copyright (c) Microsoft. All rights reserved. + +from enum import Enum + + +class DelegateTypes(Enum): + Unknown = 0 + Void = 1 + OutString = 2 + OutTaskString = 3 + InSKContext = 4 + InSKContextOutString = 5 + InSKContextOutTaskString = 6 + ContextSwitchInSKContextOutTaskSKContext = 7 + InString = 8 + InStringOutString = 9 + InStringOutTaskString = 10 + InStringAndContext = 11 + InStringAndContextOutString = 12 + InStringAndContextOutTaskString = 13 + ContextSwitchInStringAndContextOutTaskContext = 14 + InStringOutTask = 15 + InContextOutTask = 16 + InStringAndContextOutTask = 17 + OutTask = 18 diff --git a/python/semantic_kernel/orchestration/sk_context.py b/python/semantic_kernel/orchestration/sk_context.py new file mode 100644 index 000000000000..59de9c29f471 --- /dev/null +++ b/python/semantic_kernel/orchestration/sk_context.py @@ -0,0 +1,237 @@ +# Copyright (c) Microsoft. All rights reserved. 
from logging import Logger
from typing import Any, Literal, Optional, Tuple, Union

from semantic_kernel.diagnostics.verify import Verify
from semantic_kernel.kernel_exception import KernelException
from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase
from semantic_kernel.orchestration.context_variables import ContextVariables
from semantic_kernel.skill_definition.read_only_skill_collection_base import (
    ReadOnlySkillCollectionBase,
)


class SKContext:
    """Semantic Kernel context.

    Carries the state of one pipeline execution: the mutable context
    variables, the semantic memory, the read-only skill collection and a
    logger. Errors signalled through fail() are recorded on the instance
    and surfaced via error_occurred / last_error_description /
    last_exception.
    """

    # Class-level defaults for the error state; fail() rebinds them as
    # instance attributes, so instances do not share mutated state.
    _error_occurred: bool = False
    _last_exception: Optional[Exception] = None
    _last_error_description: str = ""
    _logger: Logger
    _memory: SemanticTextMemoryBase
    _skill_collection: ReadOnlySkillCollectionBase
    _variables: ContextVariables

    def __init__(
        self,
        variables: ContextVariables,
        memory: SemanticTextMemoryBase,
        skill_collection: ReadOnlySkillCollectionBase,
        logger: Logger,
        # TODO: cancellation token?
    ) -> None:
        """
        Initializes a new instance of the SKContext class.

        Arguments:
            variables {ContextVariables} -- The context variables.
            memory {SemanticTextMemoryBase} -- The semantic text memory.
            skill_collection {ReadOnlySkillCollectionBase} -- The skill collection.
            logger {Logger} -- The logger.
        """
        self._variables = variables
        self._memory = memory
        self._skill_collection = skill_collection
        self._logger = logger

    def fail(self, error_description: str, exception: Optional[Exception] = None):
        """
        Call this method to signal that an error occurred.
        In the usual scenarios, this is also how execution is stopped
        e.g., to inform the user or take necessary steps.

        Arguments:
            error_description {str} -- The error description.

        Keyword Arguments:
            exception {Exception} -- The exception (default: {None}).
        """
        self._error_occurred = True
        self._last_error_description = error_description
        self._last_exception = exception

    @property
    def result(self) -> str:
        """
        Print the processed input, aka the current data
        after any processing that has occurred.

        Returns:
            str -- Processed input, aka result.
        """
        # Delegates to ContextVariables' string conversion.
        return str(self._variables)

    @property
    def error_occurred(self) -> bool:
        """
        Whether an error occurred while executing functions in the pipeline.

        Returns:
            bool -- Whether an error occurred.
        """
        return self._error_occurred

    @property
    def last_error_description(self) -> str:
        """
        The last error description.

        Returns:
            str -- The last error description.
        """
        return self._last_error_description

    @property
    def last_exception(self) -> Optional[Exception]:
        """
        When an error occurs, this is the most recent exception.

        Returns:
            Exception -- The most recent exception.
        """
        return self._last_exception

    @property
    def variables(self) -> ContextVariables:
        """
        User variables.

        Returns:
            ContextVariables -- The context variables.
        """
        return self._variables

    @property
    def memory(self) -> SemanticTextMemoryBase:
        """
        The semantic text memory.

        Returns:
            SemanticTextMemoryBase -- The semantic text memory.
        """
        return self._memory

    @property
    def skills(self) -> ReadOnlySkillCollectionBase:
        """
        Read only skills collection.

        Returns:
            ReadOnlySkillCollectionBase -- The skills collection.
        """
        # NOTE(review): this property is read-only (no setter in this
        # commit); assigning through it raises AttributeError.
        return self._skill_collection

    @property
    def log(self) -> Logger:
        """
        The logger.

        Returns:
            Logger -- The logger.
        """
        return self._logger

    def __setitem__(self, key: str, value: Any) -> None:
        """
        Sets a context variable.

        Arguments:
            key {str} -- The variable name.
            value {Any} -- The variable value.
        """
        self._variables[key] = value

    def __getitem__(self, key: str) -> Any:
        """
        Gets a context variable.

        Arguments:
            key {str} -- The variable name.

        Returns:
            Any -- The variable value.
        """
        return self._variables[key]

    def func(self, skill_name: str, function_name: str):
        """
        Access registered functions by skill + name. Not case sensitive.
        The function might be native or semantic, it's up to the caller
        handling it.

        Arguments:
            skill_name {str} -- The skill name.
            function_name {str} -- The function name.

        Returns:
            SKFunctionBase -- The function.
        """
        Verify.not_null(self._skill_collection, "The skill collection hasn't been set")
        assert self._skill_collection is not None  # for type checker

        # Native functions take precedence over semantic ones.
        if self._skill_collection.has_native_function(skill_name, function_name):
            return self._skill_collection.get_native_function(skill_name, function_name)

        return self._skill_collection.get_semantic_function(skill_name, function_name)

    def __str__(self) -> str:
        # When in an error state, render the error instead of the result.
        if self._error_occurred:
            return f"Error: {self._last_error_description}"

        return self.result

    def throw_if_skill_collection_not_set(self) -> None:
        """
        Throws an exception if the skill collection hasn't been set.
        """
        if self._skill_collection is None:
            raise KernelException(
                KernelException.ErrorCodes.SkillCollectionNotSet,
                "Skill collection not found in the context",
            )

    def is_function_registered(
        self, skill_name: str, function_name: str
    ) -> Union[Tuple[Literal[True], Any], Tuple[Literal[False], None]]:
        """
        Checks whether a function is registered in this context.

        Arguments:
            skill_name {str} -- The skill name.
            function_name {str} -- The function name.

        Returns:
            Tuple[bool, SKFunctionBase] -- A tuple with a boolean indicating
            whether the function is registered and the function itself (or None).
        """
        self.throw_if_skill_collection_not_set()
        assert self._skill_collection is not None  # for type checker

        if self._skill_collection.has_native_function(skill_name, function_name):
            the_func = self._skill_collection.get_native_function(
                skill_name, function_name
            )
            return True, the_func

        # NOTE(review): a None skill name presumably looks the native
        # function up in the global/skill-less namespace — confirm against
        # the skill collection implementation.
        if self._skill_collection.has_native_function(None, function_name):
            the_func = self._skill_collection.get_native_function(None, function_name)
            return True, the_func

        if self._skill_collection.has_semantic_function(skill_name, function_name):
            the_func = self._skill_collection.get_semantic_function(
                skill_name, function_name
            )
            return True, the_func

        return False, None
# Copyright (c) Microsoft. All rights reserved.

from enum import Enum
from logging import Logger
from typing import Any, Callable, List, Optional

from semantic_kernel.ai.complete_request_settings import CompleteRequestSettings
from semantic_kernel.ai.text_completion_client_base import TextCompletionClientBase
from semantic_kernel.diagnostics.verify import Verify
from semantic_kernel.kernel_exception import KernelException
from semantic_kernel.memory.null_memory import NullMemory
from semantic_kernel.memory.semantic_text_memory_base import SemanticTextMemoryBase
from semantic_kernel.orchestration.context_variables import ContextVariables
from semantic_kernel.orchestration.delegate_handlers import DelegateHandlers
from semantic_kernel.orchestration.delegate_inference import DelegateInference
from semantic_kernel.orchestration.delegate_types import DelegateTypes
from semantic_kernel.orchestration.sk_context import SKContext
from semantic_kernel.orchestration.sk_function_base import SKFunctionBase
from semantic_kernel.semantic_functions.semantic_function_config import (
    SemanticFunctionConfig,
)
from semantic_kernel.skill_definition.function_view import FunctionView
from semantic_kernel.skill_definition.parameter_view import ParameterView
from semantic_kernel.skill_definition.read_only_skill_collection_base import (
    ReadOnlySkillCollectionBase,
)
from semantic_kernel.utils.null_logger import NullLogger


class SKFunction(SKFunctionBase):
    """Standard SKFunctionBase implementation wrapping either a native
    method or a semantic (prompt-based) function."""

    _parameters: List[ParameterView]
    _delegate_type: DelegateTypes
    _function: Callable[..., Any]
    _skill_collection: Optional[ReadOnlySkillCollectionBase]
    _log: Logger
    _ai_backend: Optional[TextCompletionClientBase]
    _ai_request_settings: CompleteRequestSettings

    @staticmethod
    def from_native_method(method, skill_name="", log=None) -> "SKFunction":
        """Build an SKFunction from a method decorated with @sk_function.

        Arguments:
            method -- The decorated native method.
            skill_name {str} -- Name of the owning skill (default: {""}).
            log {Logger} -- Optional logger.

        Returns:
            SKFunction -- The wrapping function instance.
        """
        Verify.not_null(method, "Method is empty")

        # NOTE(review): assert-based validation is stripped under
        # `python -O`; kept for parity with the original contract.
        assert method.__sk_function__ is not None, "Method is not a SK function"
        assert method.__sk_function_name__ is not None, "Method name is empty"

        parameters = []
        for param in method.__sk_function_context_parameters__:
            assert "name" in param, "Parameter name is empty"
            assert "description" in param, "Parameter description is empty"
            assert "default_value" in param, "Parameter default value is empty"

            parameters.append(
                ParameterView(
                    param["name"], param["description"], param["default_value"]
                )
            )

        if hasattr(method, "__sk_function_input_description__"):
            # The "input" parameter, when declared, always comes first.
            input_param = ParameterView(
                "input",
                method.__sk_function_input_description__,
                method.__sk_function_input_default_value__,
            )
            parameters = [input_param] + parameters

        return SKFunction(
            delegate_type=DelegateInference.infer_delegate_type(method),
            delegate_function=method,
            parameters=parameters,
            description=method.__sk_function_description__,
            skill_name=skill_name,
            function_name=method.__sk_function_name__,
            is_semantic=False,
            log=log,
        )

    @staticmethod
    def from_semantic_config(
        skill_name: str,
        function_name: str,
        function_config: SemanticFunctionConfig,
        log: Optional[Logger] = None,
    ) -> "SKFunction":
        """Build an SKFunction from a semantic function configuration.

        Arguments:
            skill_name {str} -- Name of the owning skill.
            function_name {str} -- Name of the function.
            function_config {SemanticFunctionConfig} -- Prompt template + config.
            log {Logger} -- Optional logger.

        Returns:
            SKFunction -- The wrapping function instance.
        """
        Verify.not_null(function_config, "Function configuration is empty")

        async def _local_func(client, request_settings, context):
            # Renders the prompt against the context, sends it to the AI
            # backend and stores the completion back into the variables.
            Verify.not_null(client, "AI LLM backend is empty")

            try:
                prompt = await function_config.prompt_template.render_async(context)
                completion = await client.complete_simple_async(
                    prompt, request_settings
                )
                context.variables.update(completion)
            except Exception as e:
                # TODO: "critical exceptions"
                context.fail(str(e), e)

            return context

        return SKFunction(
            delegate_type=DelegateTypes.ContextSwitchInSKContextOutTaskSKContext,
            delegate_function=_local_func,
            parameters=function_config.prompt_template.get_parameters(),
            description=function_config.prompt_template_config.description,
            skill_name=skill_name,
            function_name=function_name,
            is_semantic=True,
            log=log,
        )

    @property
    def name(self) -> str:
        return self._name

    @property
    def skill_name(self) -> str:
        return self._skill_name

    @property
    def description(self) -> str:
        return self._description

    @property
    def parameters(self) -> List[ParameterView]:
        return self._parameters

    @property
    def is_semantic(self) -> bool:
        return self._is_semantic

    @property
    def is_native(self) -> bool:
        return not self._is_semantic

    @property
    def request_settings(self) -> CompleteRequestSettings:
        return self._ai_request_settings

    def __init__(
        self,
        delegate_type: DelegateTypes,
        delegate_function: Callable[..., Any],
        parameters: List[ParameterView],
        description: str,
        skill_name: str,
        function_name: str,
        is_semantic: bool,
        log: Optional[Logger] = None,
    ) -> None:
        """Initialize the function; prefer the from_native_method /
        from_semantic_config factories over calling this directly."""
        self._delegate_type = delegate_type
        self._function = delegate_function
        self._parameters = parameters
        self._description = description
        self._skill_name = skill_name
        self._name = function_name
        self._is_semantic = is_semantic
        self._log = log if log is not None else NullLogger()
        self._skill_collection = None
        self._ai_backend = None
        self._ai_request_settings = CompleteRequestSettings()

    def set_default_skill_collection(
        self, skills: ReadOnlySkillCollectionBase
    ) -> "SKFunction":
        """Set the skill collection used when invoked without a context."""
        self._skill_collection = skills
        return self

    def set_ai_backend(
        self, ai_backend: Callable[[], TextCompletionClientBase]
    ) -> "SKFunction":
        """Set the AI backend from a factory (semantic functions only).

        NOTE(review): the factory is invoked eagerly here, not lazily as
        the base-class docstring suggests — confirm intended lifetime.
        """
        Verify.not_null(ai_backend, "AI LLM backend factory is empty")
        self._verify_is_semantic()
        self._ai_backend = ai_backend()
        return self

    def set_ai_configuration(self, settings: CompleteRequestSettings) -> "SKFunction":
        """Set the LLM completion settings (semantic functions only)."""
        Verify.not_null(settings, "AI LLM request settings are empty")
        self._verify_is_semantic()
        self._ai_request_settings = settings
        return self

    def describe(self) -> FunctionView:
        """Return a FunctionView describing this function."""
        return FunctionView(
            name=self.name,
            skill_name=self.skill_name,
            description=self.description,
            is_semantic=self.is_semantic,
            parameters=self._parameters,
        )

    async def invoke_async(
        self,
        input: Optional[str] = None,
        context: Optional[SKContext] = None,
        settings: Optional[CompleteRequestSettings] = None,
        log: Optional[Logger] = None,
    ) -> SKContext:
        """Invoke the function, creating a fresh context when none is given.

        Keyword Arguments:
            input {str} -- Explicit string input merged into the variables.
            context {SKContext} -- The context to use (default: new one).
            settings {CompleteRequestSettings} -- LLM completion settings.
            log {Logger} -- Application logger.

        Returns:
            SKContext -- The updated context.
        """
        if context is None:
            # A fresh context needs the default skill collection.
            Verify.not_null(self._skill_collection, "Skill collection is empty")
            assert self._skill_collection is not None

            context = SKContext(
                ContextVariables(""),
                NullMemory.instance,  # type: ignore
                self._skill_collection,
                log if log is not None else self._log,
                # TODO: ctoken?
            )

        if input is not None:
            context.variables.update(input)

        if self.is_semantic:
            return await self._invoke_semantic_async(context, settings)
        else:
            return await self._invoke_native_async(context)

    async def invoke_with_custom_input_async(
        self,
        input: ContextVariables,
        memory: SemanticTextMemoryBase,
        skills: ReadOnlySkillCollectionBase,
        log: Optional[Logger] = None,
    ) -> SKContext:
        """Invoke with explicit variables, memory and skills; any raised
        exception is recorded on the context via fail() instead of
        propagating."""
        tmp_context = SKContext(
            input,
            memory,
            skills,
            log if log is not None else self._log,
        )

        try:
            return await self.invoke_async(input=None, context=tmp_context, log=log)
        except Exception as e:
            tmp_context.fail(str(e), e)
            return tmp_context

    async def _invoke_semantic_async(self, context, settings):
        # Run the prompt-based delegate, merging its output variables back
        # into the caller's context.
        self._verify_is_semantic()

        self._ensure_context_has_skills(context)

        if settings is None:
            settings = self._ai_request_settings

        new_context = await self._function(self._ai_backend, settings, context)
        context.variables.merge_or_overwrite(new_context.variables)
        return context

    async def _invoke_native_async(self, context):
        # Dispatch to the delegate handler matching the inferred type.
        self._verify_is_native()

        self._ensure_context_has_skills(context)

        delegate = DelegateHandlers.get_handler(self._delegate_type)
        new_context = await delegate(self._function, context)

        return new_context

    def _verify_is_semantic(self) -> None:
        # Raises when called on a native function.
        if self._is_semantic:
            return

        self._log.error("The function is not semantic")
        raise KernelException(
            KernelException.ErrorCodes.InvalidFunctionType,
            "Invalid operation, the method requires a semantic function",
        )

    def _verify_is_native(self) -> None:
        # Raises when called on a semantic function.
        if not self._is_semantic:
            return

        self._log.error("The function is not native")
        raise KernelException(
            KernelException.ErrorCodes.InvalidFunctionType,
            "Invalid operation, the method requires a native function",
        )

    def _ensure_context_has_skills(self, context) -> None:
        # Give the context this function's skill collection when it has
        # none. FIX: SKContext.skills is a read-only property (no setter
        # in this commit), so `context.skills = ...` raised
        # AttributeError; write the backing field directly instead.
        if context.skills is not None:
            return

        context._skill_collection = self._skill_collection

    def _trace_function_type_Call(self, type: Enum, log: Logger) -> None:
        # NOTE(review): name is not PEP 8 (mixed case); kept unchanged in
        # case external callers reference it.
        log.debug(f"Executing function type {type}: {type.name}")
class SKFunctionBase(ABC):
    """Abstract contract shared by semantic and native kernel functions."""

    @property
    @abstractmethod
    def name(self) -> str:
        """
        Name of the function.

        The name is used by the skill collection and in
        prompt templates; e.g., {{skillName.functionName}}
        """
        pass

    @property
    @abstractmethod
    def skill_name(self) -> str:
        """
        Name of the skill that contains this function.

        The name is used by the skill collection and in
        prompt templates; e.g., {{skillName.functionName}}"""
        pass

    @property
    @abstractmethod
    def description(self) -> str:
        """
        Function description.

        The description is used in combination with embeddings
        when searching for relevant functions."""
        pass

    @property
    @abstractmethod
    def is_semantic(self) -> bool:
        """
        Whether the function is semantic.

        IMPORTANT: native functions might use semantic functions
        internally, so when this property is False, executing
        the function might still involve AI calls.
        """
        pass

    @property
    @abstractmethod
    def is_native(self) -> bool:
        """
        Whether the function is native.

        IMPORTANT: native functions might use semantic functions
        internally, so when this property is True, executing
        the function might still involve AI calls.
        """
        pass

    @property
    @abstractmethod
    def request_settings(self) -> CompleteRequestSettings:
        """AI backend settings"""
        pass

    @abstractmethod
    def describe(self) -> FunctionView:
        """
        Returns a description of the function,
        including its parameters

        Returns:
            FunctionView -- The function description.
        """
        # FIX: the original declared `def describe()` without `self`,
        # which is inconsistent with every implementation and would make
        # a direct ABC-level call fail.
        pass

    @abstractmethod
    async def invoke_async(
        self,
        input: Optional[str] = None,
        context: Optional[SKContext] = None,
        settings: Optional[CompleteRequestSettings] = None,
        log: Optional[Logger] = None
        # TODO: ctoken
    ) -> SKContext:
        """
        Invokes the function with an explicit string input

        Keyword Arguments:
            input {str} -- The explicit string input (default: {None})
            context {SKContext} -- The context to use
            settings {CompleteRequestSettings} -- LLM completion settings
            log {Logger} -- Application logger

        Returns:
            SKContext -- The updated context, potentially a new one if
            context switching is implemented.
        """
        pass

    @abstractmethod
    async def invoke_with_custom_input_async(
        self,
        input: ContextVariables,
        memory: SemanticTextMemoryBase,
        skills: "ReadOnlySkillCollectionBase",
        log: Optional[Logger] = None,
    ) -> SKContext:
        """
        Invokes the function with a custom input

        Arguments:
            input {ContextVariables} -- The custom input
            memory {SemanticTextMemoryBase} -- The memory to use
            skills {ReadOnlySkillCollectionBase} -- The skill collection to use
            log {Logger} -- Application logger

        Returns:
            SKContext -- The updated context, potentially a new one if
            context switching is implemented.
        """
        pass

    @abstractmethod
    def set_default_skill_collection(
        self,
        skills: "ReadOnlySkillCollectionBase",
    ) -> "SKFunctionBase":
        """
        Sets the skill collection to use when the function is
        invoked without a context or with a context that doesn't have
        a skill collection

        Arguments:
            skills {ReadOnlySkillCollectionBase} -- Kernel's skill collection

        Returns:
            SKFunctionBase -- The function instance
        """
        pass

    @abstractmethod
    def set_ai_backend(
        self, backend_factory: Callable[[], TextCompletionClientBase]
    ) -> "SKFunctionBase":
        """
        Sets the AI backend used by the semantic function, passing in a factory
        method. The factory allows us to lazily instantiate the client and to
        properly handle its disposal

        Arguments:
            backend_factory -- AI backend factory

        Returns:
            SKFunctionBase -- The function instance
        """
        pass

    @abstractmethod
    def set_ai_configuration(
        self, settings: CompleteRequestSettings
    ) -> "SKFunctionBase":
        """
        Sets the AI completion settings used with LLM requests

        Arguments:
            settings {CompleteRequestSettings} -- LLM completion settings

        Returns:
            SKFunctionBase -- The function instance
        """
        pass
# Copyright (c) Microsoft. All rights reserved.

import logging
from typing import Awaitable, Callable, TypeVar

from semantic_kernel.reliability.retry_mechanism import RetryMechanism

T = TypeVar("T")


class PassThroughWithoutRetry(RetryMechanism):
    """A retry mechanism that does not retry."""

    async def execute_with_retry_async(
        self, action: Callable[[], Awaitable[T]], log: logging.Logger
    ) -> T:
        """Executes the given action once, without retrying on failure.

        Arguments:
            action {Callable[[], Awaitable[T]]} -- The action to execute.
            log {logging.Logger} -- The logger to use.

        Returns:
            T -- The result of the action.

        Raises:
            Exception -- Whatever the action raises, after logging it.
        """
        try:
            # FIX: propagate the action's result; the original awaited the
            # action but discarded its return value and returned None.
            return await action()
        except Exception as e:
            # FIX: Logger.warning takes the message first with lazy
            # %-style args; the original passed the exception object as
            # the format string and the message as an unused argument.
            log.warning("Error executing action, not retrying: %s", e)
            raise
import abc
import logging
from typing import Awaitable, Callable, TypeVar

T = TypeVar("T")


class RetryMechanism(abc.ABC):
    """Interface for strategies that run an awaitable action and decide
    whether/how to retry it on failure."""

    @abc.abstractmethod
    async def execute_with_retry_async(
        self, action: Callable[[], Awaitable[T]], log: logging.Logger
    ) -> T:
        """Executes the given action with retry logic.

        Arguments:
            action {Callable[[], Awaitable[T]]} -- The action to retry on exception.
            log {logging.Logger} -- The logger to use.

        Returns:
            T -- The result of the action (awaiting this coroutine yields T,
            so the annotation is T rather than Awaitable[T]).
        """
        pass
template + self._template_engine = template_engine + self._prompt_config = prompt_config + self._log = log if log is not None else NullLogger() + + def get_parameters(self) -> List[ParameterView]: + seen = set() + + result = [] + for param in self._prompt_config.input.parameters: + if param is None: + continue + + result.append( + ParameterView(param.name, param.description, param.default_value) + ) + + seen.add(param.name) + + blocks = self._template_engine.extract_blocks(self._template) + for block in blocks: + if block.type != BlockTypes.Variable: + continue + if block is None: + continue + + var_block: VarBlock = block # type: ignore + if var_block.name in seen: + continue + + result.append(ParameterView(var_block.name, "", "")) + + seen.add(var_block.name) + + return result + + async def render_async(self, context: "SKContext") -> str: + return await self._template_engine.render_async(self._template, context) diff --git a/python/semantic_kernel/semantic_functions/prompt_template_base.py b/python/semantic_kernel/semantic_functions/prompt_template_base.py new file mode 100644 index 000000000000..13fcb9dcc173 --- /dev/null +++ b/python/semantic_kernel/semantic_functions/prompt_template_base.py @@ -0,0 +1,18 @@ +# Copyright (c) Microsoft. All rights reserved. 
@dataclass
class PromptTemplateConfig:
    """Configuration describing a semantic-function prompt: schema/type
    metadata, LLM completion settings, preferred backends, and the input
    parameters the template accepts."""

    @dataclass
    class CompletionConfig:
        # LLM sampling settings with sensible defaults.
        temperature: float = 0.0
        top_p: float = 1.0
        presence_penalty: float = 0.0
        frequency_penalty: float = 0.0
        max_tokens: int = 256
        stop_sequences: List[str] = field(default_factory=list)

    @dataclass
    class InputParameter:
        name: str = ""
        description: str = ""
        default_value: str = ""

    @dataclass
    class InputConfig:
        parameters: List["PromptTemplateConfig.InputParameter"] = field(
            default_factory=list
        )

    schema: int = 1
    type: str = "completion"
    description: str = ""
    completion: "PromptTemplateConfig.CompletionConfig" = field(
        default_factory=CompletionConfig
    )
    default_backends: List[str] = field(default_factory=list)
    input: "PromptTemplateConfig.InputConfig" = field(default_factory=InputConfig)

    @staticmethod
    def from_dict(data: dict) -> "PromptTemplateConfig":
        """Deserialize a config from a plain dict.

        Missing keys keep the dataclass defaults instead of being
        overwritten with None (the previous behavior).
        """
        config = PromptTemplateConfig()
        config.schema = data.get("schema", config.schema)
        # Fix: the key was previously misspelled as "type)", so `type` was
        # always set to None.
        config.type = data.get("type", config.type)
        config.description = data.get("description", config.description)

        # Some skills may not have all completion parameters defined;
        # fall back to the CompletionConfig defaults (previously this
        # indexed data["completion"] directly and stored None for any
        # missing entry).
        completion = PromptTemplateConfig.CompletionConfig()
        completion_dict = data.get("completion") or {}
        completion.temperature = completion_dict.get(
            "temperature", completion.temperature
        )
        completion.top_p = completion_dict.get("top_p", completion.top_p)
        completion.presence_penalty = completion_dict.get(
            "presence_penalty", completion.presence_penalty
        )
        completion.frequency_penalty = completion_dict.get(
            "frequency_penalty", completion.frequency_penalty
        )
        completion.max_tokens = completion_dict.get(
            "max_tokens", completion.max_tokens
        )
        completion.stop_sequences = completion_dict.get(
            "stop_sequences", completion.stop_sequences
        )
        config.completion = completion
        config.default_backends = data.get("default_backends") or []

        # Some skills may not have input parameters defined
        config.input = PromptTemplateConfig.InputConfig()
        input_dict = data.get("input") or {}
        for parameter in input_dict.get("parameters") or []:
            config.input.parameters.append(
                PromptTemplateConfig.InputParameter(
                    parameter["name"],  # a parameter must at least be named
                    parameter.get("description", ""),
                    parameter.get("default_value", ""),
                )
            )
        return config

    @staticmethod
    def from_json(json_str: str) -> "PromptTemplateConfig":
        """Deserialize a config from its JSON representation."""
        import json

        return PromptTemplateConfig.from_dict(json.loads(json_str))

    @staticmethod
    def from_completion_parameters(
        temperature: float = 0.0,
        top_p: float = 1.0,
        presence_penalty: float = 0.0,
        frequency_penalty: float = 0.0,
        max_tokens: int = 256,
        stop_sequences: List[str] = [],
    ) -> "PromptTemplateConfig":
        """Build a config from individual completion parameters."""
        config = PromptTemplateConfig()
        config.completion.temperature = temperature
        config.completion.top_p = top_p
        config.completion.presence_penalty = presence_penalty
        config.completion.frequency_penalty = frequency_penalty
        config.completion.max_tokens = max_tokens
        # Fix: copy the list so the caller's list (or the shared mutable
        # default) is never aliased into the config.
        config.completion.stop_sequences = list(stop_sequences)
        return config
@dataclass
class SemanticFunctionConfig:
    """Everything needed to define a semantic function: the prompt
    template and the configuration it was built from."""

    prompt_template_config: "PromptTemplateConfig"
    prompt_template: "PromptTemplate"
class FunctionView:
    """Read/write metadata describing a kernel function: its name, owning
    skill, description, parameters, and whether it is semantic and/or
    asynchronous. The function name is validated on construction and on
    every rename."""

    _name: str
    _skill_name: str
    _description: str
    _is_semantic: bool
    _is_asynchronous: bool
    _parameters: List[ParameterView]

    def __init__(
        self,
        name: str,
        skill_name: str,
        description: str,
        parameters: List[ParameterView],
        is_semantic: bool,
        is_asynchronous: bool = True,
    ) -> None:
        # Validate eagerly so an invalid name never becomes visible state.
        Verify.valid_function_name(name)

        self._name = name
        self._skill_name = skill_name
        self._description = description
        self._parameters = parameters
        self._is_semantic = is_semantic
        self._is_asynchronous = is_asynchronous

    @property
    def name(self) -> str:
        return self._name

    @name.setter
    def name(self, value: str) -> None:
        # Renames go through the same validation as construction.
        Verify.valid_function_name(value)
        self._name = value

    @property
    def skill_name(self) -> str:
        return self._skill_name

    @skill_name.setter
    def skill_name(self, value: str) -> None:
        self._skill_name = value

    @property
    def description(self) -> str:
        return self._description

    @description.setter
    def description(self, value: str) -> None:
        self._description = value

    @property
    def parameters(self) -> List[ParameterView]:
        return self._parameters

    @parameters.setter
    def parameters(self, value: List[ParameterView]) -> None:
        self._parameters = value

    @property
    def is_semantic(self) -> bool:
        return self._is_semantic

    @is_semantic.setter
    def is_semantic(self, value: bool) -> None:
        self._is_semantic = value

    @property
    def is_asynchronous(self) -> bool:
        return self._is_asynchronous

    @is_asynchronous.setter
    def is_asynchronous(self, value: bool) -> None:
        self._is_asynchronous = value
class FunctionsView:
    """Aggregated, per-skill view of registered semantic and native
    functions."""

    _semantic_functions: Dict[str, List[FunctionView]]
    _native_functions: Dict[str, List[FunctionView]]

    def __init__(self) -> None:
        self._semantic_functions = {}
        self._native_functions = {}

    def add_function(self, view: FunctionView) -> "FunctionsView":
        """Record `view` under its skill name; returns self for chaining."""
        bucket = (
            self._semantic_functions if view.is_semantic else self._native_functions
        )
        bucket.setdefault(view.skill_name, []).append(view)
        return self

    def is_semantic(self, skill_name: str, function_name: str) -> bool:
        """True when the function is registered as semantic.

        Raises:
            KernelException -- if the same name exists as both semantic
            and native (ambiguous implementation).
        """
        found_semantic = any(
            f.name == function_name
            for f in self._semantic_functions.get(skill_name, [])
        )
        found_native = any(
            f.name == function_name
            for f in self._native_functions.get(skill_name, [])
        )

        if found_semantic and found_native:
            raise KernelException(
                KernelException.ErrorCodes.AmbiguousImplementation,
                f"There are 2 functions with the same name: {function_name}."
                f"One is native and the other semantic.",
            )

        return found_semantic

    def is_native(self, skill_name: str, function_name: str) -> bool:
        """True when the function is not registered as semantic."""
        return not self.is_semantic(skill_name, function_name)
class ParameterView:
    """Metadata for a single function parameter: name, description, and
    default value. The name is validated on construction and on every
    assignment."""

    _name: str
    _description: str
    _default_value: str

    def __init__(self, name: str, description: str, default_value: str) -> None:
        Verify.valid_function_param_name(name)

        self._name = name
        self._description = description
        self._default_value = default_value

    @property
    def name(self) -> str:
        return self._name

    @name.setter
    def name(self, value: str) -> None:
        # Renames go through the same validation as construction.
        Verify.valid_function_param_name(value)
        self._name = value

    @property
    def description(self) -> str:
        return self._description

    @description.setter
    def description(self, value: str) -> None:
        self._description = value

    @property
    def default_value(self) -> str:
        return self._default_value

    @default_value.setter
    def default_value(self, value: str) -> None:
        self._default_value = value
class ReadOnlySkillCollection(ReadOnlySkillCollectionBase):
    """Read-only facade over a mutable skill collection.

    Every query is delegated verbatim to the wrapped collection; no
    mutating operations are exposed. The underlying storage is shared,
    not copied, so later registrations are visible through this view.
    """

    # The wrapped, mutable collection (shared reference).
    _skill_collection: "SkillCollectionBase"

    def __init__(self, skill_collection: "SkillCollectionBase") -> None:
        self._skill_collection = skill_collection

    def has_function(self, skill_name: Optional[str], function_name: str) -> bool:
        """Delegates: is any function with this name registered?"""
        return self._skill_collection.has_function(skill_name, function_name)

    def has_semantic_function(
        self, skill_name: Optional[str], function_name: str
    ) -> bool:
        """Delegates: is a semantic function with this name registered?"""
        return self._skill_collection.has_semantic_function(skill_name, function_name)

    def has_native_function(
        self, skill_name: Optional[str], function_name: str
    ) -> bool:
        """Delegates: is a native function with this name registered?"""
        return self._skill_collection.has_native_function(skill_name, function_name)

    def get_semantic_function(
        self, skill_name: Optional[str], function_name: str
    ) -> "SKFunctionBase":
        """Delegates the semantic-function lookup to the wrapped collection."""
        return self._skill_collection.get_semantic_function(skill_name, function_name)

    def get_native_function(
        self, skill_name: Optional[str], function_name: str
    ) -> "SKFunctionBase":
        """Delegates the native-function lookup to the wrapped collection."""
        return self._skill_collection.get_native_function(skill_name, function_name)

    def get_functions_view(
        self, include_semantic: bool = True, include_native: bool = True
    ) -> "FunctionsView":
        """Delegates construction of the aggregated functions view."""
        return self._skill_collection.get_functions_view(
            include_semantic, include_native
        )
class ReadOnlySkillCollectionBase(ABC):
    """Abstract read-only interface for querying registered skill
    functions; implemented by both the mutable collection and its
    read-only facade."""

    @abstractmethod
    def has_function(self, skill_name: Optional[str], function_name: str) -> bool:
        """Return True if any function with this name is registered."""
        pass

    @abstractmethod
    def has_semantic_function(
        self, skill_name: Optional[str], function_name: str
    ) -> bool:
        """Return True if a semantic function with this name is registered."""
        pass

    @abstractmethod
    def has_native_function(
        self, skill_name: Optional[str], function_name: str
    ) -> bool:
        """Return True if a native function with this name is registered."""
        pass

    @abstractmethod
    def get_semantic_function(
        self, skill_name: Optional[str], function_name: str
    ) -> "SKFunctionBase":
        """Return the registered semantic function."""
        pass

    @abstractmethod
    def get_native_function(
        self, skill_name: Optional[str], function_name: str
    ) -> "SKFunctionBase":
        """Return the registered native function."""
        pass

    @abstractmethod
    def get_functions_view(
        self, include_semantic: bool = True, include_native: bool = True
    ) -> "FunctionsView":
        """Return an aggregated view of the registered functions."""
        pass
def sk_function(description: str):
    """
    Decorator that marks a method as an SK-callable function.

    Args:
        description -- The description of the function
    """

    def wrapper(func):
        # Flag the callable so the skill importer can discover it, and
        # attach the human-readable description.
        func.__sk_function__ = True
        func.__sk_function_description__ = description
        return func

    return wrapper
def sk_function_name(name: str):
    """
    Decorator that overrides the registered name of an SK function.

    Args:
        name -- The name of the function
    """

    def apply(func):
        # The importer reads this attribute instead of func.__name__.
        func.__sk_function_name__ = name
        return func

    return apply
class SkillCollection(SkillCollectionBase):
    """In-memory registry of skills and their functions.

    Functions are stored in a two-level dict keyed by lower-cased skill
    name, then lower-cased function name. Native functions registered
    without a skill name fall into the GLOBAL_SKILL bucket.
    """

    _skill_collection: Dict[str, Dict[str, "SKFunctionBase"]]
    _read_only_skill_collection: "ReadOnlySkillCollectionBase"
    _log: Logger

    @property
    def read_only_skill_collection(self) -> "ReadOnlySkillCollectionBase":
        # Read-only facade handed out to contexts; shares this storage.
        return self._read_only_skill_collection

    def __init__(self, log: Optional[Logger] = None) -> None:
        self._log = log if log is not None else NullLogger()
        self._read_only_skill_collection = ReadOnlySkillCollection(self)
        self._skill_collection = {}

    def add_semantic_function(
        self, function: "SKFunctionBase"
    ) -> "SkillCollectionBase":
        """Register a semantic function under its skill.

        Returns:
            SkillCollectionBase -- self, for chaining. (The abstract base
            declares this return type; the previous implementation
            returned None.)
        """
        Verify.not_null(function, "The function provided is None")

        # Fix: use the shared normalization helper so a None skill name
        # raises a clear validation error instead of an AttributeError
        # on `.lower()`.
        s_name, f_name = self._normalize_names(function.skill_name, function.name)

        if s_name not in self._skill_collection:
            self._skill_collection[s_name] = {}

        self._skill_collection[s_name][f_name] = function
        return self

    def add_native_function(
        self, function: "SKFunctionBase"
    ) -> "SkillCollectionBase":
        """Register a native function; a None skill name maps to GLOBAL_SKILL.

        Returns:
            SkillCollectionBase -- self, for chaining.
        """
        Verify.not_null(function, "The function provided is None")

        s_name, f_name = self._normalize_names(
            function.skill_name, function.name, True
        )

        if s_name not in self._skill_collection:
            self._skill_collection[s_name] = {}

        self._skill_collection[s_name][f_name] = function
        return self

    def has_function(self, skill_name: Optional[str], function_name: str) -> bool:
        """True if any function with this name exists (skill may be None)."""
        s_name, f_name = self._normalize_names(skill_name, function_name, True)
        return (
            s_name in self._skill_collection
            and f_name in self._skill_collection[s_name]
        )

    def has_semantic_function(self, skill_name: str, function_name: str) -> bool:
        """True if a *semantic* function with this name exists."""
        s_name, f_name = self._normalize_names(skill_name, function_name)
        skill = self._skill_collection.get(s_name)
        if skill is None or f_name not in skill:
            return False
        return skill[f_name].is_semantic

    def has_native_function(self, skill_name: str, function_name: str) -> bool:
        """True if a *native* function with this name exists."""
        s_name, f_name = self._normalize_names(skill_name, function_name, True)
        skill = self._skill_collection.get(s_name)
        if skill is None or f_name not in skill:
            return False
        return skill[f_name].is_native

    def get_semantic_function(
        self, skill_name: str, function_name: str
    ) -> "SKFunctionBase":
        """Return the semantic function, raising FunctionNotAvailable otherwise."""
        s_name, f_name = self._normalize_names(skill_name, function_name)
        if self.has_semantic_function(s_name, f_name):
            return self._skill_collection[s_name][f_name]

        self._log.error(f"Function not available: {s_name}.{f_name}")
        raise KernelException(
            KernelException.ErrorCodes.FunctionNotAvailable,
            f"Function not available: {s_name}.{f_name}",
        )

    def get_native_function(
        self, skill_name: str, function_name: str
    ) -> "SKFunctionBase":
        """Return the native function, raising FunctionNotAvailable otherwise."""
        s_name, f_name = self._normalize_names(skill_name, function_name, True)
        if self.has_native_function(s_name, f_name):
            return self._skill_collection[s_name][f_name]

        self._log.error(f"Function not available: {s_name}.{f_name}")
        raise KernelException(
            KernelException.ErrorCodes.FunctionNotAvailable,
            f"Function not available: {s_name}.{f_name}",
        )

    def get_functions_view(
        self, include_semantic: bool = True, include_native: bool = True
    ) -> FunctionsView:
        """Build a FunctionsView over the registered functions."""
        result = FunctionsView()

        for skill in self._skill_collection.values():
            for function in skill.values():
                if include_semantic and function.is_semantic:
                    result.add_function(function.describe())
                elif include_native and function.is_native:
                    result.add_function(function.describe())

        return result

    def _normalize_names(
        self,
        skill_name: Optional[str],
        function_name: str,
        allow_substitution: bool = False,
    ) -> Tuple[str, str]:
        """Lower-case both names; optionally substitute GLOBAL_SKILL for a
        None skill name. Raises a validation error when the skill name is
        None and substitution is not allowed."""
        s_name = skill_name
        if s_name is None and allow_substitution:
            s_name = self.GLOBAL_SKILL

        Verify.not_null(s_name, "The skill name provided is None")
        assert s_name is not None  # to make type checker happy

        return s_name.lower(), function_name.lower()

    @static_property
    def GLOBAL_SKILL() -> Literal["_GLOBAL_FUNCTIONS_"]:
        # Bucket used for native functions registered without a skill name.
        return "_GLOBAL_FUNCTIONS_"
class SkillCollectionBase(ReadOnlySkillCollectionBase, ABC):
    """Abstract mutable skill collection: extends the read-only query
    interface with registration of semantic and native functions."""

    @property
    @abstractmethod
    def read_only_skill_collection(self) -> ReadOnlySkillCollectionBase:
        """A read-only facade sharing this collection's storage."""
        pass

    @abstractmethod
    def add_semantic_function(
        self, semantic_function: "SKFunctionBase"
    ) -> "SkillCollectionBase":
        """Register a semantic function; returns self for chaining."""
        pass

    @abstractmethod
    def add_native_function(
        self, native_function: "SKFunctionBase"
    ) -> "SkillCollectionBase":
        """Register a native function; returns self for chaining."""
        pass
class Block(ABC):
    """Base class for prompt-template blocks (text, variable, code).

    Holds the block type, the raw content string, and a logger;
    subclasses implement validation and rendering.
    """

    _type: BlockTypes
    _content: str
    _log: Logger

    def __init__(self, block_type: BlockTypes, content: str, log: Logger) -> None:
        self._type = block_type
        self._content = content
        self._log = log

    async def render_code_async(self, context: SKContext) -> str:
        # Only code blocks override this; all other block types are
        # rendered synchronously via render().
        raise NotImplementedError("This block does not support code execution")

    @abstractmethod
    def is_valid(self) -> Tuple[bool, str]:
        """Return (ok, error_message) for this block's content."""
        pass

    @abstractmethod
    def render(self, variables: Optional[ContextVariables]) -> str:
        """Render the block to text, given optional context variables."""
        pass

    @property
    def type(self) -> BlockTypes:
        return self._type

    @property
    def content(self) -> str:
        return self._content
class CodeBlock(Block):
    """A template code block that invokes a skill function, optionally
    passing a single $variable as its parameter."""

    # Set to True once is_valid() has passed, to skip re-validation.
    _validated: bool = False

    def __init__(self, content: str, log: Logger) -> None:
        super().__init__(BlockTypes.Code, content, log)

    def _is_valid_function_name(self, name: str) -> bool:
        # Letter/underscore start; dots allowed for "skill.function".
        return regex_match(r"^[a-zA-Z_][a-zA-Z0-9_.]*$", name) is not None

    def is_valid(self) -> Tuple[bool, str]:
        """Validate the block: a function name optionally followed by
        $variable parameters.

        Returns:
            Tuple[bool, str] -- (ok, error message; "" when valid).
        """
        error = ""

        if self._content is None:
            error = "This code block's content is None"
        elif self._content.strip() == "":
            error = "This code block's content is empty"

        if error != "":
            self._log.error(error)
            return False, error

        # split content on ' ', '\t', '\r', and '\n' and
        # remove any empty parts
        parts = [part for part in self._content.split() if part != ""]

        for index, part in enumerate(parts):
            if index == 0:  # there is only a function name
                if VarBlock.has_var_prefix(part):
                    error = f"Variables cannot be used as function names [`{part}`]"
                    break

                if not self._is_valid_function_name(part):
                    error = f"Invalid function name [`{part}`]"
                    break
            else:  # the function has parameters
                if not VarBlock.has_var_prefix(part):
                    error = (
                        f"[`{part}`] is not a valid function parameter: "
                        "parameters must be valid variables (invalid prefix)."
                    )
                    break
                if len(part) < 2:
                    error = (
                        f"[`{part}`] is not a valid function parameter: "
                        "parameters must be valid variables (too short)."
                    )
                    # Fix: previously fell through to the next check, which
                    # could overwrite this error on a later iteration.
                    break
                if not VarBlock.is_valid_var_name(part[1:]):
                    error = (
                        f"[`{part}`] is not a valid function parameter: "
                        "parameters must be valid variables (invalid characters)."
                    )
                    break

        if error != "":
            self._log.error(error)
            return False, error

        self._validated = True
        return True, ""

    def render(self, variable: Optional[ContextVariables]) -> str:
        raise NotImplementedError(
            "Code block rendering requires using the render_code_async method call."
        )

    async def render_code_async(self, context: SKContext) -> str:
        """Resolve and invoke the referenced function.

        Returns:
            str -- the function's result, or "" when the function is
            missing, variables are unset, or the invocation failed.
        """
        if not self._validated:
            valid, error = self.is_valid()
            if not valid:
                raise TemplateException(TemplateException.ErrorCodes.SyntaxError, error)

        self._log.debug(f"Rendering code block: `{self._content}`")

        parts = [part for part in self._content.split() if part != ""]
        function_name = parts[0]

        context.throw_if_skill_collection_not_set()
        # hack to get types to check, should never fail
        assert context.skills is not None
        found, function = self._get_function_from_skill_collection(
            context.skills, function_name
        )

        if not found:
            self._log.warning(f"Function not found: `{function_name}`")
            return ""
        assert function is not None  # for type checker

        if context.variables is None:
            self._log.error("Context variables are not set")
            return ""

        variables_clone = context.variables.clone()
        if len(parts) > 1:
            self._log.debug(f"Passing required parameter: `{parts[1]}`")
            value = VarBlock(parts[1], self._log).render(variables_clone)
            variables_clone.update(value)

        result = await function.invoke_with_custom_input_async(
            variables_clone, context.memory, context.skills, self._log
        )

        if result.error_occurred:
            # Fix: the first line was missing its f-prefix, so the literal
            # text "{function_name}" was written to the log.
            self._log.error(
                f"Semantic function references a function `{function_name}` "
                f"of incompatible type `{function.__class__.__name__}`"
            )
            return ""

        return result.result

    def _get_function_from_skill_collection(
        self, skills: ReadOnlySkillCollectionBase, function_name: str
    ) -> Tuple[bool, Optional[SKFunctionBase]]:
        """Locate `function_name` (optionally "skill.function") among the
        registered native and semantic functions.

        Returns:
            Tuple[bool, Optional[SKFunctionBase]] -- (found, function).
        """
        # A bare name can only refer to a global native function.
        if skills.has_native_function(None, function_name):
            return True, skills.get_native_function(None, function_name)

        if "." in function_name:
            parts = function_name.split(".")
            if len(parts) > 2:
                self._log.error(f"Invalid function name: `{function_name}`")
                raise TemplateException(
                    TemplateException.ErrorCodes.SyntaxError,
                    f"Invalid function name: `{function_name}`"
                    "A function name can only contain one `.` to "
                    "delineate the skill name from the function name.",
                )

            skill_name, function_name = parts
            if skills.has_native_function(skill_name, function_name):
                return True, skills.get_native_function(skill_name, function_name)

            if skills.has_semantic_function(skill_name, function_name):
                return True, skills.get_semantic_function(skill_name, function_name)

        return False, None
+ +from logging import Logger +from typing import Optional, Tuple + +from semantic_kernel.orchestration.context_variables import ContextVariables +from semantic_kernel.template_engine.blocks.block import Block +from semantic_kernel.template_engine.blocks.block_types import BlockTypes + + +class TextBlock(Block): + def __init__(self, content: str, log: Logger) -> None: + super().__init__(BlockTypes.Text, content, log) + + def is_valid(self) -> Tuple[bool, str]: + return True, "" + + def render(self, _: Optional[ContextVariables]) -> str: + return self._content diff --git a/python/semantic_kernel/template_engine/blocks/var_block.py b/python/semantic_kernel/template_engine/blocks/var_block.py new file mode 100644 index 000000000000..864ec71be6d3 --- /dev/null +++ b/python/semantic_kernel/template_engine/blocks/var_block.py @@ -0,0 +1,76 @@ +# Copyright (c) Microsoft. All rights reserved. + +from logging import Logger +from re import match as regex_match +from typing import Literal, Optional, Tuple + +from semantic_kernel.orchestration.context_variables import ContextVariables +from semantic_kernel.template_engine.blocks.block import Block +from semantic_kernel.template_engine.blocks.block_types import BlockTypes +from semantic_kernel.template_engine.template_exception import TemplateException +from semantic_kernel.utils.static_property import static_property + + +class VarBlock(Block): + _name: str + + def __init__(self, content: str, log: Logger) -> None: + super().__init__(BlockTypes.Variable, content, log) + + def is_valid(self) -> Tuple[bool, str]: + error = "" + + if self._content is None: + error = "This variable's content is None" + elif self._content.strip() == "": + error = "This variable's content is empty" + elif self._content[0] != VarBlock.PREFIX: + error = f"A variable must start with the symbol {VarBlock.PREFIX}" + elif not self.is_valid_var_name(self.name): + error = ( + f"The variable name '{self.name}' contains invalid characters." 
+ " Only alphanumeric characters and underscores are allowed." + ) + + if error != "": + self._log.error(error) + return False, error + + return True, "" + + def render(self, variables: Optional[ContextVariables]) -> str: + if variables is None: + return "" + + if self.name is not None and self.name.strip() != "": + exists, value = variables.get(self.name) + + if not exists: + self._log.warning(f"Variable {VarBlock.PREFIX}{self.name} not found") + + return value if exists else "" + + self._log.error("Variable rendering failed, the variable name is None or empty") + raise TemplateException( + TemplateException.ErrorCodes.SyntaxError, + "Variable rendering failed, the variable name is None or empty", + ) + + @property + def name(self) -> str: + if self._content is None or len(self._content) < 2: + return "" + + return self._content[1:] + + @staticmethod + def has_var_prefix(text: str) -> bool: + return text is not None and text.strip() != "" and text[0] == VarBlock.PREFIX + + @staticmethod + def is_valid_var_name(text: str) -> bool: + return regex_match(r"^[a-zA-Z0-9_]*$", text) is not None + + @static_property + def PREFIX() -> Literal["$"]: + return "$" diff --git a/python/semantic_kernel/template_engine/prompt_template_engine.py b/python/semantic_kernel/template_engine/prompt_template_engine.py new file mode 100644 index 000000000000..5e92f9f1b672 --- /dev/null +++ b/python/semantic_kernel/template_engine/prompt_template_engine.py @@ -0,0 +1,174 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from logging import Logger +from typing import List, Optional + +from semantic_kernel.orchestration.context_variables import ContextVariables +from semantic_kernel.orchestration.sk_context import SKContext +from semantic_kernel.template_engine.blocks.block import Block +from semantic_kernel.template_engine.blocks.block_types import BlockTypes +from semantic_kernel.template_engine.blocks.code_block import CodeBlock +from semantic_kernel.template_engine.blocks.text_block import TextBlock +from semantic_kernel.template_engine.blocks.var_block import VarBlock +from semantic_kernel.template_engine.prompt_template_engine_base import ( + PromptTemplateEngineBase, +) +from semantic_kernel.template_engine.template_exception import TemplateException +from semantic_kernel.utils.null_logger import NullLogger + + +class PromptTemplateEngine(PromptTemplateEngineBase): + _log: Logger + + def __init__(self, log: Optional[Logger] = None) -> None: + self._log = log if log is not None else NullLogger() + + def extract_blocks( + self, template_text: Optional[str], validate: bool = True + ) -> List[Block]: + self._log.debug(f"Extracting blocks from template: {template_text}") + blocks = self._tokenize_internal(template_text) + if validate: + PromptTemplateEngine.validate_blocks_syntax(blocks) + return blocks + + async def render_async(self, template_text: str, context: SKContext) -> str: + self._log.debug(f"Rendering string template: {template_text}") + blocks = self.extract_blocks(template_text) + return await self.render_blocks_async(blocks, context) + + async def render_blocks_async(self, blocks: List[Block], context: SKContext) -> str: + self._log.debug(f"Rendering list of {len(blocks)} blocks") + result = "" + for block in blocks: + if block.type == BlockTypes.Text: + result += block.content + continue + + if block.type == BlockTypes.Variable: + result += block.render(context.variables) + continue + + if block.type == BlockTypes.Code: + result += await 
block.render_code_async(context) + continue + + raise NotImplementedError(f"Block type {block.type} is not supported") + + self._log.debug(f"Rendered prompt: {result}") + return result + + def render_variables( + self, blocks: List[Block], context: Optional[ContextVariables] + ) -> List[Block]: + self._log.debug("Rendering variables") + return list( + [ + block + if block.type != BlockTypes.Variable + else TextBlock(block.render(context), self._log) + for block in blocks + ] + ) + + async def render_code_async( + self, blocks: List[Block], context: SKContext + ) -> List[Block]: + self._log.debug("Rendering code") + + updated_blocks = [] + for block in blocks: + if block.type != BlockTypes.Code: + updated_blocks.append(block) + continue + + updated_blocks.append( + TextBlock(await block.render_code_async(context), self._log) + ) + + return updated_blocks + + @staticmethod + def validate_blocks_syntax(blocks: List[Block]) -> None: + for block in blocks: + is_valid, message = block.is_valid() + if not is_valid: + raise TemplateException( + TemplateException.ErrorCodes.SyntaxError, message + ) + + def _tokenize_internal(self, template: Optional[str]) -> List[Block]: + if template is None or template.strip() == "": + return [TextBlock("", self._log)] + + STARTER, ENDER = "{", "}" + # An empty block consists of 4 chars: "{{}}" + EMPTY_CODE_BLOCK_LENGTH = 4 + # A block shorter than 5 chars is either empty + # or invalid, e.g. 
"{{ }}" and "{{$}}" + MIN_CODE_BLOCK_LENGTH = EMPTY_CODE_BLOCK_LENGTH + 1 + + if len(template) < MIN_CODE_BLOCK_LENGTH: + return [TextBlock(template, self._log)] + + blocks = [] + + cursor = 0 + end_of_last_block = 0 + + start_pos = 0 + start_found = False + + while cursor < len(template): + # Utility function to get the char at the given offset + # (relative to the current cursor position) + def _get_char(offset: int = 0) -> str: + return template[cursor + offset] + + # When '{{' is found + if _get_char() == STARTER and _get_char(1) == STARTER: + start_pos = cursor + start_found = True + # When '}}' is found + elif start_found and _get_char() == ENDER and _get_char(1) == ENDER: + # If there is plain text between the current + # var/code block and the previous one, capture + # that as a text block + if start_pos > end_of_last_block: + blocks.append( + TextBlock(template[end_of_last_block:start_pos], self._log) + ) + + # Skip ahead of the second '}' of '}}' + cursor += 1 + + # Extract raw block + content_with_delims = template[start_pos : cursor + 1] + + # Remove the '{{' and '}}' delimiters and trim + content_without_delims = content_with_delims[ + len(STARTER + STARTER) : -len(ENDER + ENDER) + ].strip() + + if len(content_without_delims) == 0: + blocks.append(TextBlock(content_with_delims, self._log)) + else: + if VarBlock.has_var_prefix(content_without_delims): + blocks.append(VarBlock(content_without_delims, self._log)) + else: + blocks.append(CodeBlock(content_without_delims, self._log)) + + # Update the end of the last block + end_of_last_block = cursor + 1 + start_found = False + + # Move the cursor forward + cursor += 1 + + # If there is plain text after the last block, capture that as a text block + if end_of_last_block < len(template): + blocks.append( + TextBlock(template[end_of_last_block : len(template)], self._log) + ) + + return blocks diff --git a/python/semantic_kernel/template_engine/prompt_template_engine_base.py 
b/python/semantic_kernel/template_engine/prompt_template_engine_base.py new file mode 100644 index 000000000000..03332a3250d4 --- /dev/null +++ b/python/semantic_kernel/template_engine/prompt_template_engine_base.py @@ -0,0 +1,36 @@ +# Copyright (c) Microsoft. All rights reserved. + +from abc import ABC, abstractmethod +from typing import List, Optional + +from semantic_kernel.orchestration.context_variables import ContextVariables +from semantic_kernel.orchestration.sk_context import SKContext +from semantic_kernel.template_engine.blocks.block import Block + + +class PromptTemplateEngineBase(ABC): + @abstractmethod + def extract_blocks( + self, template_text: Optional[str], validate: bool = True + ) -> List[Block]: + pass + + @abstractmethod + async def render_async(self, template_text: str, context: SKContext) -> str: + pass + + @abstractmethod + async def render_blocks_async(self, blocks: List[Block], context: SKContext) -> str: + pass + + @abstractmethod + def render_variables( + self, blocks: List[Block], context: Optional[ContextVariables] + ) -> List[Block]: + pass + + @abstractmethod + async def render_code_async( + self, blocks: List[Block], context: SKContext + ) -> List[Block]: + pass diff --git a/python/semantic_kernel/template_engine/template_exception.py b/python/semantic_kernel/template_engine/template_exception.py new file mode 100644 index 000000000000..5dec48b6ad10 --- /dev/null +++ b/python/semantic_kernel/template_engine/template_exception.py @@ -0,0 +1,42 @@ +# Copyright (c) Microsoft. All rights reserved. + +from enum import Enum +from typing import Optional + +from semantic_kernel.diagnostics.sk_exception import SKException + + +class TemplateException(SKException): + class ErrorCodes(Enum): + # Unknown. + Unknown = -1 + # Syntax error. + SyntaxError = 0 + + # The error code. 
+ _error_code: ErrorCodes + + def __init__( + self, + error_code: ErrorCodes, + message: str, + inner_exception: Optional[Exception] = None, + ) -> None: + """Initializes a new instance of the TemplateException class. + + Arguments: + error_code {ErrorCodes} -- The error code. + message {str} -- The error message. + inner_exception {Exception} -- The inner exception. + """ + super().__init__(error_code, message, inner_exception) + self._error_code = error_code + + @property + def error_code(self) -> ErrorCodes: + """Gets the error code. + + Returns: + ErrorCodes -- The error code. + """ + return self._error_code diff --git a/python/semantic_kernel/utils/null_logger.py b/python/semantic_kernel/utils/null_logger.py new file mode 100644 index 000000000000..86f8b48cc913 --- /dev/null +++ b/python/semantic_kernel/utils/null_logger.py @@ -0,0 +1,24 @@ +# Copyright (c) Microsoft. All rights reserved. + +from logging import Logger + + +class NullLogger(Logger): + """ + A logger that does nothing. + """ + + def __init__(self) -> None: + pass + + def debug(self, _: str) -> None: + pass + + def info(self, _: str) -> None: + pass + + def warning(self, _: str) -> None: + pass + + def error(self, _: str) -> None: + pass diff --git a/python/semantic_kernel/utils/settings.py b/python/semantic_kernel/utils/settings.py new file mode 100644 index 000000000000..e8fcfe7e6943 --- /dev/null +++ b/python/semantic_kernel/utils/settings.py @@ -0,0 +1,62 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Optional, Tuple + + +def openai_settings_from_dot_env() -> Tuple[str, Optional[str]]: + """ + Reads the OpenAI API key and organization ID from the .env file. 
+ + Returns: + Tuple[str, str]: The OpenAI API key, the OpenAI organization ID + """ + + api_key, org_id = None, None + with open(".env", "r") as f: + lines = f.readlines() + + for line in lines: + if line.startswith("OPENAI_API_KEY"): + parts = line.split("=")[1:] + api_key = "=".join(parts).strip().strip('"') + continue + + if line.startswith("OPENAI_ORG_ID"): + parts = line.split("=")[1:] + org_id = "=".join(parts).strip().strip('"') + continue + + assert api_key is not None, "OpenAI API key not found in .env file" + + # It's okay if the org ID is not found (not required) + return api_key, org_id + + +def azure_openai_settings_from_dot_env() -> Tuple[str, str]: + """ + Reads the Azure OpenAI API key and endpoint from the .env file. + + Returns: + Tuple[str, str]: The Azure OpenAI API key, the endpoint + """ + + api_key, endpoint = None, None + with open(".env", "r") as f: + lines = f.readlines() + + for line in lines: + if line.startswith("AZURE_OPENAI_API_KEY"): + parts = line.split("=")[1:] + api_key = "=".join(parts).strip().strip('"') + continue + + if line.startswith("AZURE_OPENAI_ENDPOINT"): + parts = line.split("=")[1:] + endpoint = "=".join(parts).strip().strip('"') + continue + + # Azure requires both the API key and the endpoint URL. + assert api_key is not None, "Azure OpenAI API key not found in .env file" + assert endpoint is not None, "Azure OpenAI endpoint not found in .env file" + + return api_key, endpoint diff --git a/python/semantic_kernel/utils/static_property.py b/python/semantic_kernel/utils/static_property.py new file mode 100644 index 000000000000..bebc95f0e0fa --- /dev/null +++ b/python/semantic_kernel/utils/static_property.py @@ -0,0 +1,8 @@ +# Copyright (c) Microsoft. All rights reserved. 
+ +from typing import Any + + +class static_property(staticmethod): + def __get__(self, obj: Any, obj_type: Any = None) -> Any: + return super().__get__(obj, obj_type)() diff --git a/python/tests/__init__.py b/python/tests/__init__.py new file mode 100644 index 000000000000..2a50eae89411 --- /dev/null +++ b/python/tests/__init__.py @@ -0,0 +1 @@ +# Copyright (c) Microsoft. All rights reserved. diff --git a/python/tests/basics.py b/python/tests/basics.py new file mode 100644 index 000000000000..d060b0028aa2 --- /dev/null +++ b/python/tests/basics.py @@ -0,0 +1,49 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +import semantic_kernel as sk + +kernel = sk.create_kernel() + +# Load credentials from .env file +api_key, org_id = sk.openai_settings_from_dot_env() + +# Configure LLM backend +kernel.config.add_openai_completion_backend( + "davinci-003", "text-davinci-003", api_key, org_id +) + +# Define semantic function using SK prompt template language +sk_prompt = """ +{{$input}} + +Give me the TLDR in 5 words. +""" + +# Create the semantic function +tldr_function = sk.extensions.create_semantic_function( + kernel, sk_prompt, max_tokens=200, temperature=0, top_p=0.5 +) + +# User input +text_to_summarize = """ + 1) A robot may not injure a human being or, through inaction, + allow a human being to come to harm. + + 2) A robot must obey orders given it by human beings except where + such orders would conflict with the First Law. + + 3) A robot must protect its own existence as long as such protection + does not conflict with the First or Second Law. 
+""" + +print("Summarizing: ") +print(text_to_summarize) +print() + +# Summarize and print +summary = asyncio.run(kernel.run_on_str_async(text_to_summarize, tldr_function)) + +output = str(summary).strip() +print(f"Summary is: '{output}'") diff --git a/python/tests/chat.py b/python/tests/chat.py new file mode 100644 index 000000000000..35f168be4e34 --- /dev/null +++ b/python/tests/chat.py @@ -0,0 +1,68 @@ +# Copyright (c) Microsoft. All rights reserved. + +import asyncio + +import semantic_kernel as sk + +sk_prompt = """ +ChatBot can have a conversation with you about any topic. +It can give explicit instructions or say 'I don't know' +when it doesn't know the answer. + +{{$chat_history}} +User:> {{$user_input}} +ChatBot:> +""" + +kernel = sk.create_kernel() + +api_key, org_id = sk.openai_settings_from_dot_env() +kernel.config.add_openai_completion_backend( + "davinci-003", "text-davinci-003", api_key, org_id +) + +prompt_config = sk.PromptTemplateConfig.from_completion_parameters( + max_tokens=2000, temperature=0.7, top_p=0.4 +) + +prompt_template = sk.PromptTemplate( + sk_prompt, kernel.prompt_template_engine, prompt_config +) + +function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template) +chat_function = kernel.register_semantic_function("ChatBot", "Chat", function_config) + + +async def chat() -> None: + context = sk.ContextVariables() + context["chat_history"] = "" + + try: + user_input = input("User:> ") + context["user_input"] = user_input + except KeyboardInterrupt: + print("\n\nExiting chat...") + return False + except EOFError: + print("\n\nExiting chat...") + return False + + if user_input == "exit": + print("\n\nExiting chat...") + return False + + answer = await kernel.run_on_vars_async(context, chat_function) + context["chat_history"] += f"\nUser:> {user_input}\nChatBot:> {answer}\n" + + print(f"ChatBot:> {answer}") + return True + + +async def main() -> None: + chatting = True + while chatting: + chatting = await chat() + + +if 
__name__ == "__main__": + asyncio.run(main()) diff --git a/python/tests/memory.py b/python/tests/memory.py new file mode 100644 index 000000000000..9f783f56a963 --- /dev/null +++ b/python/tests/memory.py @@ -0,0 +1,153 @@ +# Copyright (c) Microsoft. All rights reserved. + +# TODO: fix/complete the memory integration + +import asyncio +from typing import Tuple + +import semantic_kernel as sk +from semantic_kernel.core_skills import TextMemorySkill + + +def build_kernel() -> sk.KernelBase: + # Setup kernel with OpenAI completion and embedding backends + api_key, org_id = sk.openai_settings_from_dot_env() + + kernel = ( + sk.kernel_builder() + .configure( + lambda c: c.add_openai_completion_backend( + "davinci-003", "text-davinci-003", api_key, org_id + ) + ) + .configure( + lambda c: c.add_open_ai_embeddings_backend( + "ada-002", "text-embedding-ada-002", api_key, org_id + ) + ) + .with_memory_storage(sk.memory.VolatileMemoryStore()) + .build() + ) + + kernel.import_skill(TextMemorySkill) + + return kernel + + +async def populate_memory(kernel: sk.KernelBase) -> None: + # Add some documents to the semantic memory + await kernel.memory.save_information_async( + "aboutMe", id="info1", text="My name is Andrea" + ) + await kernel.memory.save_information_async( + "aboutMe", id="info2", text="I currently work as a tour guide" + ) + await kernel.memory.save_information_async( + "aboutMe", id="info3", text="I've been living in Seattle since 2005" + ) + await kernel.memory.save_information_async( + "aboutMe", id="info4", text="I visited France and Italy five times since 2015" + ) + await kernel.memory.save_information_async( + "aboutMe", id="info5", text="My family is from New York" + ) + + +async def search_memory_examples(kernel: sk.KernelBase) -> None: + questions = [ + "what's my name", + "where do I live?", + "where's my family from?", + "where have I traveled?", + "what do I do for work", + ] + + for question in questions: + print(f"Question: {question}") + result = 
await kernel.memory.search_async("aboutMe", question) + print(f"Answer: {result[0].text}\n") + + +async def setup_chat_with_memory( + kernel: sk.KernelBase, +) -> Tuple[sk.SKFunctionBase, sk.SKContext]: + sk_prompt = """ + ChatBot can have a conversation with you about any topic. + It can give explicit instructions or say 'I don't know' if + it does not have an answer. + + Information about me, from previous conversations: + - {{$fact1}} {{recall $fact1}} + - {{$fact2}} {{recall $fact2}} + - {{$fact3}} {{recall $fact3}} + - {{$fact4}} {{recall $fact4}} + - {{$fact5}} {{recall $fact5}} + + Chat: + {{$chat_history}} + User: {{$user_input}} + ChatBot: """.strip() + + chat_func = sk.extensions.create_semantic_function( + kernel, sk_prompt, max_tokens=200, temperature=0.8 + ) + + context = kernel.create_new_context() + context["fact1"] = "what is my name?" + context["fact2"] = "where do I live?" + context["fact3"] = "where's my family from?" + context["fact4"] = "where have I traveled?" + context["fact5"] = "what do I do for work?" + + context[TextMemorySkill.COLLECTION_PARAM] = "aboutMe" + context[TextMemorySkill.RELEVANCE_PARAM] = 0.8 + + context["chat_history"] = "" + + return chat_func, context + + +async def chat( + kernel: sk.KernelBase, chat_func: sk.SKFunctionBase, context: sk.SKContext +) -> bool: + try: + user_input = input("User:> ") + context["user_input"] = user_input + except KeyboardInterrupt: + print("\n\nExiting chat...") + return False + except EOFError: + print("\n\nExiting chat...") + return False + + if user_input == "exit": + print("\n\nExiting chat...") + return False + + answer = await kernel.run_on_vars_async(context.variables, chat_func) + context["chat_history"] += f"\nUser:> {user_input}\nChatBot:> {answer}\n" + + print(f"ChatBot:> {answer}") + return True + + +async def main() -> None: + kernel = build_kernel() + + print("Populating memory...") + await populate_memory(kernel) + + print("Asking questions... 
(manually)") + await search_memory_examples(kernel) + + print("Setting up a chat (with memory!)") + chat_func, context = await setup_chat_with_memory(kernel) + + print("Begin chatting (type 'exit' to exit):\n") + chatting = True + while chatting: + chatting = await chat(kernel, chat_func, context) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/samples/notebooks/python/1-basic-loading-the-kernel.ipynb b/samples/notebooks/python/1-basic-loading-the-kernel.ipynb new file mode 100644 index 000000000000..d43c1d31d882 --- /dev/null +++ b/samples/notebooks/python/1-basic-loading-the-kernel.ipynb @@ -0,0 +1,175 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Basic Loading of the Kernel" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "To run the notebooks we recommend using Poetry and starting a shell with a virtual environment\n", + "prepared to use SK. \n", + "\n", + "See [DEV_SETUP.md](../../../python/DEV_SETUP.md) for more information." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from init import *\n", + "import semantic_kernel as sk" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can instantiate the kernel in a few ways, depending on your use case." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Simple instance\n", + "kernel_1 = sk.KernelBuilder.create_kernel()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Instance with a custom logger\n", + "my_logger = sk.NullLogger()\n", + "kernel_2 = (\n", + " sk.kernel_builder()\n", + " .with_logger(my_logger)\n", + " .build()\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Instance with a custom configuration\n", + "my_config = sk.KernelConfig()\n", + "kernel_2 = (\n", + " sk.kernel_builder()\n", + " .with_configuration(my_config)\n", + " .build()\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "When using the kernel for AI requests, the kernel needs some settings like URL and credentials to the AI models.\n", + "\n", + "The SDK currently supports OpenAI and Azure OpenAI, other services will be added over time.\n", + "\n", + "If you need an Azure OpenAI key, go [here](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/quickstart?pivots=rest-api)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "kernel = sk.KernelBuilder.create_kernel()\n", + "\n", + "kernel.config.add_azure_openai_completion_backend(\n", + " \"Azure_curie\", # alias used in the prompt templates' config.json\n", + " \"my-finetuned-Curie\", # Azure OpenAI *Deployment ID*\n", + " \"https://contoso.openai.azure.com/\", # Azure OpenAI *Endpoint*\n", + " \"...your Azure OpenAI Key...\" # Azure OpenAI *Key*\n", + ")\n", + "\n", + "kernel.config.add_openai_completion_backend(\n", + " \"OpenAI_davinci\", # alias used in the prompt templates' config.json\n", + " \"text-davinci-003\", # OpenAI Model Name\n", + " \"...your OpenAI API Key...\", # OpenAI API key\n", + " \"...your OpenAI Org ID...\" # *optional* OpenAI Organization ID\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "When working with multiple backends and multiple models, the **first backend** defined\n", + "is also the \"**default**\" used in these scenarios:\n", + "\n", + "* a prompt configuration doesn't specify which AI backend to use\n", + "* a prompt configuration requires a backend unknown to the kernel\n", + "\n", + "The default can be set and changed programmatically:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "kernel.config.set_default_completion_backend(\"Azure_curie\");" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Great, now that you're familiar with setting up the Semantic Kernel, let's see [how we can use it to run prompts](2-running-prompts-from-file.ipynb)." 
+ ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.9" + }, + "polyglot_notebook": { + "kernelInfo": { + "items": [ + { + "aliases": [ + "frontend" + ], + "name": "vscode" + } + ] + } + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/samples/notebooks/python/2-running-prompts-from-file.ipynb b/samples/notebooks/python/2-running-prompts-from-file.ipynb new file mode 100644 index 000000000000..5d3521fc9dbb --- /dev/null +++ b/samples/notebooks/python/2-running-prompts-from-file.ipynb @@ -0,0 +1,176 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "692e361b", + "metadata": {}, + "source": [ + "# How to run a semantic skills from file\n", + "Now that you're familiar with Kernel basics, let's see how the kernel allows you to run Semantic Skills and Semantic Functions stored on disk. \n", + "\n", + "A Semantic Skill is a collection of Semantic Functions, where each function is defined with natural language that can be provided with a text file. 
\n", + "\n", + "Refer to our [glossary](https://github.com/microsoft/semantic-kernel/blob/main/docs/GLOSSARY.md) for an in-depth guide to the terms.\n", + "\n", + "The repository includes some examples under the [samples](https://github.com/microsoft/semantic-kernel/tree/main/samples) folder.\n", + "\n", + "For instance, [this](../../skills/FunSkill/Joke/skprompt.txt) is the **Joke function** part of the **FunSkill skill**:" + ] + }, + { + "cell_type": "markdown", + "id": "f3ce1efe", + "metadata": {}, + "source": [ + "```\n", + "WRITE EXACTLY ONE JOKE or HUMOROUS STORY ABOUT THE TOPIC BELOW.\n", + "JOKE MUST BE:\n", + "- G RATED\n", + "- WORKPLACE/FAMILY SAFE\n", + "NO SEXISM, RACISM OR OTHER BIAS/BIGOTRY.\n", + "BE CREATIVE AND FUNNY. I WANT TO LAUGH.\n", + "+++++\n", + "{{$input}}\n", + "+++++\n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "afdb96d6", + "metadata": {}, + "source": [ + "Note the special **`{{$input}}`** token, which is a variable that is automatically passed when invoking the function, commonly referred to as a \"function parameter\". \n", + "\n", + "We'll explore later how functions can accept multiple variables, as well as invoke other functions." + ] + }, + { + "cell_type": "markdown", + "id": "c3bd5134", + "metadata": {}, + "source": [ + "\n", + "In the same folder you'll notice a second [config.json](../../skills/FunSkill/Joke/config.json) file. 
The file is optional, and is used to set some parameters for large language models like Temperature, TopP, Stop Sequences, etc.\n", + "\n", + "```\n", + "{\n", + " \"schema\": 1,\n", + " \"type\": \"completion\",\n", + " \"description\": \"Generate a funny joke\",\n", + " \"completion\": {\n", + " \"max_tokens\": 500,\n", + " \"temperature\": 0.5,\n", + " \"top_p\": 0.5\n", + " }\n", + "}\n", + "```" + ] + }, + { + "cell_type": "markdown", + "id": "384ff07f", + "metadata": {}, + "source": [ + "Given a semantic function defined by these files, this is how to load and use a file based semantic function.\n", + "\n", + "Load and configure the kernel, as usual, loading also the AI backend settings defined in the [Setup notebook](0-AI-settings.ipynb):" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b0062a24", + "metadata": {}, + "outputs": [], + "source": [ + "from init import *\n", + "import semantic_kernel as sk\n", + "\n", + "kernel = sk.KernelBuilder.create_kernel()\n", + "\n", + "useAzureOpenAI = False\n", + "model = \"text-davinci-002\"\n", + "\n", + "# Configure AI backend used by the kernel\n", + "if useAzureOpenAI:\n", + " api_key, endpoint = azure_openai_settings_from_dot_env()\n", + " kernel.config.add_azure_openai_completion_backend(\"davinci\", model, endpoint, api_key, overwrite = True)\n", + "else:\n", + " api_key, org_id = openai_settings_from_dot_env()\n", + " kernel.config.add_openai_completion_backend(\"davinci\", model, api_key, org_id, overwrite=True)\n" + ] + }, + { + "cell_type": "markdown", + "id": "fd5ff1f4", + "metadata": {}, + "source": [ + "Import the skill and all its functions:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "56ee184d", + "metadata": {}, + "outputs": [], + "source": [ + "# note: using skills from the samples folder\n", + "from semantic_kernel.kernel_extensions.import_semantic_skill_from_directory import import_semantic_skill_from_directory\n", + "\n", + "skills_directory = 
\"../../skills\"\n", + "skill = import_semantic_skill_from_directory(kernel, skills_directory, \"FunSkill\")" + ] + }, + { + "cell_type": "markdown", + "id": "edd99fa0", + "metadata": {}, + "source": [ + "How to use the skill functions, e.g. generate a joke about \"*time travel to dinosaur age*\":" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6effe63b", + "metadata": {}, + "outputs": [], + "source": [ + "result = await (kernel.run_on_str_async(\"time travel to dinosaur age\", skill[\"Joke\"]))\n", + "\n", + "print(result)" + ] + }, + { + "cell_type": "markdown", + "id": "2281a1fc", + "metadata": {}, + "source": [ + "Great, now that you know how to load a skill from disk, let's show how you can [create and run a semantic function inline.](./3-semantic-function-inline.ipynb)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/samples/notebooks/python/3-semantic-function-inline.ipynb b/samples/notebooks/python/3-semantic-function-inline.ipynb new file mode 100644 index 000000000000..1b9df2bc35d8 --- /dev/null +++ b/samples/notebooks/python/3-semantic-function-inline.ipynb @@ -0,0 +1,307 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "3c93ac5b", + "metadata": {}, + "source": [ + "# Running Semantic Functions Inline" + ] + }, + { + "cell_type": "markdown", + "id": "40201641", + "metadata": {}, + "source": [ + "The [previous notebook](./2-running-prompts-from-file.ipynb)\n", + "showed how to define a semantic function using a prompt template stored on a file.\n", + "\n", + "In this notebook, we'll show how to use the Semantic Kernel to 
define functions inline with your Python code. This can be useful in a few scenarios:\n", + "\n", + "* Dynamically generating the prompt using complex rules at runtime\n", + "* Writing prompts by editing Python code instead of TXT files. \n", + "* Easily creating demos, like this document\n", + "\n", + "Prompt templates are defined using the SK template language, which allows you to reference variables and functions. Read [this doc](https://aka.ms/sk/howto/configurefunction) to learn more about the design decisions for prompt templating. \n", + "\n", + "For now we'll use only the `{{$input}}` variable, and see more complex templates later.\n", + "\n", + "Almost all semantic function prompts have a reference to `{{$input}}`, which is the default way\n", + "a user can import content from the context variables." + ] + }, + { + "cell_type": "markdown", + "id": "d90b0c13", + "metadata": {}, + "source": [ + "Prepare a semantic kernel instance first, loading also the AI backend settings defined in the [Setup notebook](0-AI-settings.ipynb):" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3712b7c3", + "metadata": {}, + "outputs": [], + "source": [ + "from init import *\n", + "import semantic_kernel as sk\n", + "from semantic_kernel.kernel_extensions.inline_function_definitions import create_semantic_function\n", + "\n", + "kernel = sk.KernelBuilder.create_kernel()\n", + "\n", + "useAzureOpenAI = False\n", + "model = \"text-davinci-002\"\n", + "\n", + "# Configure AI backend used by the kernel\n", + "if useAzureOpenAI:\n", + "    api_key, endpoint = azure_openai_settings_from_dot_env()\n", + "    kernel.config.add_azure_openai_completion_backend(\"davinci\", model, endpoint, api_key, overwrite=True)\n", + "else:\n", + "    api_key, org_id = openai_settings_from_dot_env()\n", + "    kernel.config.add_openai_completion_backend(\"davinci\", model, api_key, org_id, overwrite=True)\n" + ] + }, + { + "cell_type": "markdown", + "id": "fd5ff1f4", + "metadata": {}, + "source": [ + "Import the skill and all its functions:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "56ee184d", + "metadata": {}, + "outputs": [], + "source": [ + "# note: using skills from the samples folder\n", + "from semantic_kernel.kernel_extensions.import_semantic_skill_from_directory import import_semantic_skill_from_directory\n", + "\n", + "skills_directory = 
"Let's create a semantic function used to summarize content:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "08188a05", + "metadata": {}, + "outputs": [], + "source": [ + "sk_prompt = \"\"\"\n", + "{{$input}}\n", + "\n", + "Summarize the content above.\n", + "\"\"\"" + ] + }, + { + "cell_type": "markdown", + "id": "fd19ac11", + "metadata": {}, + "source": [ + "Let's configure the prompt, e.g. allowing for some creativity and a sufficient number of tokens." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dcb339ce", + "metadata": {}, + "outputs": [], + "source": [ + "prompt_config = sk.PromptTemplateConfig.from_completion_parameters(\n", + " max_tokens=2000, temperature=0.2, top_p=0.5\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "05e275a6", + "metadata": {}, + "source": [ + "The following code prepares an instance of the template, passing in the TXT and configuration above, \n", + "and a couple of other parameters (how to render the TXT and how the template can access other functions)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "76b9d28a", + "metadata": {}, + "outputs": [], + "source": [ + "prompt_template = sk.PromptTemplate(\n", + " sk_prompt, kernel.prompt_template_engine, prompt_config\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "e296d572", + "metadata": {}, + "source": [ + "Let's transform the prompt template into a function that the kernel can execute:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ae29c207", + "metadata": {}, + "outputs": [], + "source": [ + "function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template)\n", + "\n", + "summary_function = kernel.register_semantic_function(\"MySkill\", \"Summary\", function_config)" + ] + }, + { + "cell_type": "markdown", + "id": "f26b90c4", + "metadata": {}, + "source": [ + "Set up some content to summarize, here's an extract about Demo, an ancient Greek poet, taken from Wikipedia (https://en.wikipedia.org/wiki/Demo_(ancient_Greek_poet)." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "314557fb", + "metadata": {}, + "outputs": [], + "source": [ + "input_text = \"\"\"\n", + "Demo (ancient Greek poet)\n", + "From Wikipedia, the free encyclopedia\n", + "Demo or Damo (Greek: Δεμώ, Δαμώ; fl. c. AD 200) was a Greek woman of the Roman period, known for a single epigram, engraved upon the Colossus of Memnon, which bears her name. She speaks of herself therein as a lyric poetess dedicated to the Muses, but nothing is known of her life.[1]\n", + "Identity\n", + "Demo was evidently Greek, as her name, a traditional epithet of Demeter, signifies. The name was relatively common in the Hellenistic world, in Egypt and elsewhere, and she cannot be further identified. 
The date of her visit to the Colossus of Memnon cannot be established with certainty, but internal evidence on the left leg suggests her poem was inscribed there at some point in or after AD 196.[2]\n", + "Epigram\n", + "There are a number of graffiti inscriptions on the Colossus of Memnon. Following three epigrams by Julia Balbilla, a fourth epigram, in elegiac couplets, entitled and presumably authored by \"Demo\" or \"Damo\" (the Greek inscription is difficult to read), is a dedication to the Muses.[2] The poem is traditionally published with the works of Balbilla, though the internal evidence suggests a different author.[1]\n", + "In the poem, Demo explains that Memnon has shown her special respect. In return, Demo offers the gift for poetry, as a gift to the hero. At the end of this epigram, she addresses Memnon, highlighting his divine status by recalling his strength and holiness.[2]\n", + "Demo, like Julia Balbilla, writes in the artificial and poetic Aeolic dialect. The language indicates she was knowledgeable in Homeric poetry—'bearing a pleasant gift', for example, alludes to the use of that phrase throughout the Iliad and Odyssey.[a][2] \n", + "\"\"\"" + ] + }, + { + "cell_type": "markdown", + "id": "bf0f2330", + "metadata": {}, + "source": [ + "...and run the summary function:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7b0e3b0c", + "metadata": {}, + "outputs": [], + "source": [ + "summary = await kernel.run_on_str_async(input_text, summary_function)\n", + "output = str(summary.variables).strip()\n", + "print(output)" + ] + }, + { + "cell_type": "markdown", + "id": "5b061ca9", + "metadata": {}, + "source": [ + "The code above shows all the steps, to understand how the function is composed step by step. 
However, the kernel\n", + "includes also some helpers to achieve the same more concisely.\n", + "\n", + "The same function above can be created with less code:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "91c23029", + "metadata": {}, + "outputs": [], + "source": [ + "sk_prompt = \"\"\"\n", + "{{$input}}\n", + "\n", + "Summarize the content above.\n", + "\"\"\"\n", + "\n", + "summary_function = create_semantic_function(kernel, sk_prompt, max_tokens=2000, temperature=0.2, top_p=0.5)\n", + "\n", + "summary = await kernel.run_on_str_async(input_text, summary_function)\n", + "\n", + "output = str(summary.variables).strip()\n", + "print(output)" + ] + }, + { + "cell_type": "markdown", + "id": "4777f447", + "metadata": {}, + "source": [ + "Here's one more example of how to write an inline Semantic Function that gives a TLDR for a piece of text.\n", + "\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ea8128c8", + "metadata": {}, + "outputs": [], + "source": [ + "kernel = sk.create_kernel()\n", + "\n", + "api_key, org_id = openai_settings_from_dot_env()\n", + "\n", + "kernel.config.add_openai_completion_backend(\n", + " \"davinci\", \"text-davinci-003\", api_key, org_id, overwrite=True)\n", + "\n", + "sk_prompt = \"\"\"\n", + "{{$input}}\n", + "\n", + "Give me the TLDR in 5 words.\n", + "\"\"\"\n", + "\n", + "text_to_summarize = \"\"\"\n", + " 1) A robot may not injure a human being or, through inaction,\n", + " allow a human being to come to harm.\n", + "\n", + " 2) A robot must obey orders given it by human beings except where\n", + " such orders would conflict with the First Law.\n", + "\n", + " 3) A robot must protect its own existence as long as such protection\n", + " does not conflict with the First or Second Law.\n", + "\"\"\"\n", + "\n", + "tldr_function = sk.extensions.create_semantic_function(\n", + " kernel,\n", + " sk_prompt,\n", + " max_tokens=200,\n", + " temperature=0,\n", + " top_p=0.5,\n", + 
")\n", + "\n", + "summary = await kernel.run_on_str_async(text_to_summarize, tldr_function)\n", + "output = str(summary.variables).strip()\n", + "print(\"Output: \" + output)\n", + "\n", + "# Output: Protect humans, follow orders, survive." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/samples/notebooks/python/4-context-variables-chat.ipynb b/samples/notebooks/python/4-context-variables-chat.ipynb new file mode 100644 index 000000000000..197fd7e2baf7 --- /dev/null +++ b/samples/notebooks/python/4-context-variables-chat.ipynb @@ -0,0 +1,252 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "fde98ddf", + "metadata": {}, + "source": [ + "# Creating a basic chat experience with context variables\n", + "\n", + "In this example, we show how you can build a simple chat bot by sending and updating context with your requests. \n", + "\n", + "We introduce the Context Variables object which in this demo functions similarly as a key-value store that you can use when running the kernel.\n", + "\n", + "The context is local (i.e. in your computer's RAM) and not persisted anywhere beyond the life of this Jupyter session.\n", + "\n", + "In future examples, we will show how to persist the context on disk so that you can bring it into your applications. \n", + "\n", + "In this chat scenario, as the user talks back and forth with the bot, the context gets populated with the history of the conversation. During each new run of the kernel, the context can provide the AI with its variables' content. 
" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "68301108", + "metadata": {}, + "outputs": [], + "source": [ + "from init import *\n", + "import semantic_kernel as sk\n", + "\n", + "kernel = sk.KernelBuilder.create_kernel()\n", + "\n", + "useAzureOpenAI = False\n", + "model = \"text-davinci-003\"\n", + "\n", + "# Configure AI backend used by the kernel\n", + "if (useAzureOpenAI):\n", + "    api_key, endpoint = azure_openai_settings_from_dot_env()\n", + "    kernel.config.add_azure_openai_completion_backend(\"davinci\", model, endpoint, api_key, overwrite=True)\n", + "else:\n", + "    api_key, org_id = openai_settings_from_dot_env()\n", + "    kernel.config.add_openai_completion_backend(\"davinci\", model, api_key, org_id, overwrite=True)" + ] + }, + { + "cell_type": "markdown", + "id": "7971783d", + "metadata": {}, + "source": [ + "Let's define a prompt outlining a dialogue chat bot." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e84a05fc", + "metadata": {}, + "outputs": [], + "source": [ + "sk_prompt = \"\"\"\n", + "ChatBot can have a conversation with you about any topic.\n", + "It can give explicit instructions or say 'I don't know' if it does not have an answer.\n", + "\n", + "{{$history}}\n", + "Human: {{$human_input}}\n", + "ChatBot:\n", + "\"\"\"\n", + "\n", + "prompt_config = sk.PromptTemplateConfig.from_completion_parameters(\n", + "    max_tokens=2000, temperature=0.7, top_p=0.5)" + ] + }, + { + "cell_type": "markdown", + "id": "61716b16", + "metadata": {}, + "source": [ + "Register your semantic function" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a3e4b160", + "metadata": {}, + "outputs": [], + "source": [ + "prompt_template = sk.PromptTemplate(sk_prompt, kernel.prompt_template_engine, prompt_config)\n", + "\n", + "function_config = sk.SemanticFunctionConfig(prompt_config, prompt_template)\n", + "chat_function = kernel.register_semantic_function(\"ChatBot\", \"Chat\", function_config)" + 
] + }, + { + "cell_type": "markdown", + "id": "6e8a676f", + "metadata": {}, + "source": [ + "Initialize your context" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a4be7394", + "metadata": {}, + "outputs": [], + "source": [ + "context = sk.ContextVariables()\n", + "\n", + "history = \"\"\n", + "context.set(\"history\", history)" + ] + }, + { + "cell_type": "markdown", + "id": "4ce7c497", + "metadata": {}, + "source": [ + "Chat with the Bot" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5ec41eb8", + "metadata": {}, + "outputs": [], + "source": [ + "human_input = \"Hi, I'm looking for book suggestions\"\n", + "context.set(\"human_input\", human_input)\n", + "\n", + "bot_answer = await kernel.run_on_str_async(context, chat_function)\n", + "print(bot_answer)" + ] + }, + { + "cell_type": "markdown", + "id": "a5b03748", + "metadata": {}, + "source": [ + "Update the history with the output" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f50f517d", + "metadata": {}, + "outputs": [], + "source": [ + "history += f\"\\nHuman: {human_input}\\nMelody: {bot_answer}\\n\"\n", + "context.update(history);\n", + "\n", + "print(context)" + ] + }, + { + "cell_type": "markdown", + "id": "23a2eb02", + "metadata": {}, + "source": [ + "Keep Chatting!" 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c59efe45", + "metadata": {}, + "outputs": [], + "source": [ + "history = \"\"\n", + "async def chat(input_text: str) -> str:\n", + " global history\n", + " # Save new message in the context variables\n", + " context.set(\"human_input\", input_text)\n", + "\n", + " # Process the user message and get an answer\n", + " answer = await kernel.run_on_str_async(context, chat_function)\n", + "\n", + " # Append the new interaction to the chat history\n", + " history = history + f\"\\nHuman: {input_text}\\nMelody: {answer}\\n\"\n", + " context.set(\"history\", history)\n", + "\n", + " # Show the response\n", + " print(context)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "06ee244e", + "metadata": {}, + "outputs": [], + "source": [ + "await chat(\"I love history and philosophy, I'd like to learn something new about Greece, any suggestion?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "82be4e7e", + "metadata": {}, + "outputs": [], + "source": [ + "await chat(\"that sounds interesting, what is it about?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "82fe0139", + "metadata": {}, + "outputs": [], + "source": [ + "await chat(\"if I read that book, what exactly will I learn about Greece history?\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "55b3a9f2", + "metadata": {}, + "outputs": [], + "source": [ + "await chat(\"could you list some more books I could read about this topic?\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.9" + } + }, + "nbformat": 4, + 
"nbformat_minor": 5 +} diff --git a/samples/notebooks/python/5-using-the-planner.ipynb b/samples/notebooks/python/5-using-the-planner.ipynb new file mode 100644 index 000000000000..9e75e69a4b9b --- /dev/null +++ b/samples/notebooks/python/5-using-the-planner.ipynb @@ -0,0 +1,260 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "99a80181", + "metadata": {}, + "source": [ + "# ⚠️ PLANNER NOT YET IMPLEMENTED IN PYTHON\n", + "\n", + "# Introduction to the Planner\n", + "\n", + "The Planner is one of the fundamental concepts of the Semantic Kernel. It makes use of the collection of skills that have been registered to the kernel and using AI, will formulate a plan to execute a given ask.\n", + "\n", + "Read more about it [here](https://aka.ms/sk/concepts/planner)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "11e59885", + "metadata": {}, + "outputs": [], + "source": [ + "from init import *\n", + "import semantic_kernel as sk\n", + "\n", + "kernel = sk.KernelBuilder.create_kernel()\n", + "\n", + "useAzureOpenAI = False\n", + "model = \"text-davinci-002\"\n", + "\n", + "# Configure AI backend used by the kernel\n", + "if (useAzureOpenAI):\n", + " api_key, endpoint = azure_openai_settings_from_dot_env()\n", + " kernel.config.add_azure_openai_completion_backend(\"davinci\", model, endpoint, api_key)\n", + "else:\n", + " api_key, org_id = openai_settings_from_dot_env()\n", + " kernel.config.add_openai_completion_backend(\"davinci\", model, api_key, org_id)" + ] + }, + { + "cell_type": "markdown", + "id": "9c885ced", + "metadata": {}, + "source": [ + "### Setting Up the Planner\n", + "The planner is located in the Semantic Kernel's CoreSkills and requires Orchestration" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "948d50fa", + "metadata": {}, + "outputs": [], + "source": [ + "from semantic_kernel.core_skills import PlannerSkill\n", + "from semantic_kernel.kernel_extensions.import_semantic_skill_from_directory import 
import_semantic_skill_from_directory\n", + "\n", + "# Load native skill into the kernel registry, sharing its functions with prompt templates\n", + "planner = kernel.import_skill(PlannerSkill(kernel))" + ] + }, + { + "cell_type": "markdown", + "id": "147587de", + "metadata": {}, + "source": [ + "You can see that the Planner took my ask and converted it into an XML-based plan detailing\n", + "how the AI would go about solving this task, making use of the skills that the Kernel has available to it." + ] + }, + { + "cell_type": "markdown", + "id": "a5d86739", + "metadata": {}, + "source": [ + "### Providing skills to the planner\n", + "The planner needs to know what skills are available to it. Here we'll give it access to the `SummarizeSkill` and `WriterSkill` we have defined on disk." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ca0e7604", + "metadata": {}, + "outputs": [], + "source": [ + "skills_directory = \"../../skills\"\n", + "skill = import_semantic_skill_from_directory(kernel, skills_directory, \"SummarizeSkill\")\n", + "skill = import_semantic_skill_from_directory(kernel, skills_directory, \"WriterSkill\")" + ] + }, + { + "cell_type": "markdown", + "id": "deff5675", + "metadata": {}, + "source": [ + "Define your ASK. What do you want the Kernel to do?" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0d537981", + "metadata": {}, + "outputs": [], + "source": [ + "ask = \"Tomorrow is Valentine's day. I need to come up with a few date ideas and e-mail them to my significant other.\";\n", + "originalPlan = await kernel.run_on_str_async(ask, planner[\"CreatePlan\"])\n", + "\n", + "print(\"Original plan:\\n\");\n", + "print(originalPlan.variables.to_plan().plan_string);" + ] + }, + { + "cell_type": "markdown", + "id": "1318dc72", + "metadata": {}, + "source": [ + "As you can see in the above plan, the AI has determined which functions to call \n", + "in order to fulfill the user ask. 
The output of each step of the plan gets set as `setContextVariable` which makes it available as `input` to the next skill. " + ] + }, + { + "cell_type": "markdown", + "id": "a64c88a7", + "metadata": {}, + "source": [ + "Let's also define an inline skill and have it be available to the Planner.\n", + "Be sure to give it a function name and skill name." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e973007f", + "metadata": {}, + "outputs": [], + "source": [ + "sk_prompt = \"\"\"\n", + "{{$input}}\n", + "\n", + "Rewrite the above in the style of Shakespeare.\n", + "\"\"\"\n", + "shakespeareFunction = kernel.create_semantic_function(sk_prompt, \"shakespeare\", \"ShakespeareSkill\", max_tokens=2000, temperature=0.2, top_p=0.5)" + ] + }, + { + "cell_type": "markdown", + "id": "305743f5", + "metadata": {}, + "source": [ + "Let's update our ask using this new skill." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bfd23ac4", + "metadata": {}, + "outputs": [], + "source": [ + "ask = \"\"\"Tomorrow is Valentine's day. I need to come up with a few date ideas.\n", + "    She likes Shakespeare so write using his style. E-mail these ideas to my significant other\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f0549fbb", + "metadata": {}, + "outputs": [], + "source": [ + "new_plan = await kernel.run_on_str_async(ask, planner[\"CreatePlan\"]);" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dfe479d6", + "metadata": {}, + "outputs": [], + "source": [ + "print(\"Updated plan:\\n\");\n", + "print(new_plan.variables.to_plan().plan_string);" + ] + }, + { + "cell_type": "markdown", + "id": "f5e6ac56", + "metadata": {}, + "source": [ + "### Executing the plan\n", + "\n", + "Now that we have a plan, let's try to execute it! The Planner has a skill called `ExecutePlan`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cf30b5da", + "metadata": {}, + "outputs": [], + "source": [ + "execution_results = new_plan" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "eddf13ea", + "metadata": {}, + "outputs": [], + "source": [ + "step = 1\n", + "maxSteps = 10\n", + "while (not execution_results.variables.to_plan().is_complete and step < maxSteps):\n", + "    results = await kernel.run_on_str_async(execution_results.variables, planner[\"ExecutePlan\"])\n", + "    if (results.variables.to_plan().is_successful):\n", + "        print(f\"Step {step} - Execution results:\\n\")\n", + "        print(results.variables.to_plan().plan_string)\n", + "\n", + "        if (results.variables.to_plan().is_complete):\n", + "            print(f\"Step {step} - COMPLETE!\")\n", + "            print(results.variables.to_plan().result)\n", + "            break\n", + "    else:\n", + "        print(f\"Step {step} - Execution failed:\")\n", + "        print(results.variables.to_plan().result)\n", + "        break\n", + "    \n", + "    execution_results = results\n", + "    step += 1\n", + "    print(\"\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/samples/notebooks/python/6-memory-and-embeddings.ipynb b/samples/notebooks/python/6-memory-and-embeddings.ipynb new file mode 100644 index 000000000000..f8ae7e97c13e --- /dev/null +++ b/samples/notebooks/python/6-memory-and-embeddings.ipynb @@ -0,0 +1,435 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "68e1c158", + "metadata": {}, + "source": [ + "# Building Semantic Memory with Embeddings\n", + "\n", + "So far, we've mostly been treating the kernel as a 
stateless orchestration engine.\n", + "We send text into a model API and receive text out. \n", + "\n", + "In a [previous notebook](4-context-variables-chat.ipynb), we used `context variables` to pass in additional\n", + "text into prompts to enrich them with more context. This allowed us to create a basic chat experience. \n", + "\n", + "However, if you solely relied on context variables, you would quickly realize that eventually your prompt\n", + "would grow so large that you would run into a the model's token limit. What we need is a way to persist state\n", + "and build both short-term and long-term memory to empower even more intelligent applications. \n", + "\n", + "To do this, we dive into the key concept of `Semantic Memory` in the Semantic Kernel. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "508ad44f", + "metadata": {}, + "outputs": [], + "source": [ + "from init import *\n", + "import semantic_kernel as sk\n", + "from semantic_kernel.kernel_extensions.inline_function_definitions import create_semantic_function\n", + "from semantic_kernel.core_skills import TextMemorySkill\n", + "from typing import Tuple" + ] + }, + { + "cell_type": "markdown", + "id": "d8ddffc1", + "metadata": {}, + "source": [ + "In order to use memory, we need to instantiate the Kernel with a Memory Storage\n", + "and an Embedding backend. In this example, we make use of the `VolatileMemoryStore` \"which can be thought of as a temporary in-memory storage (not to be confused with Semantic Memory). This memory is not written to disk and is only available during the app session.\n", + "\n", + "When developing your app you will have the option to plug in persistent storage like Azure Cosmos Db, PostgreSQL, SQLite, etc. Semantic Memory allows also to index external data sources, without duplicating all the information, more on that later." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8f8dcbc6", + "metadata": {}, + "outputs": [], + "source": [ + "def build_kernel(useAzureOpenAI=False) -> sk.KernelBase:\n", + " # Setup kernel with OpenAI completion and embedding backends\n", + "\n", + " if (useAzureOpenAI):\n", + " api_key, endpoint = sk.azure_openai_settings_from_dot_env()\n", + " else:\n", + " api_key, org_id = sk.openai_settings_from_dot_env()\n", + "\n", + " kernel = (\n", + " sk.kernel_builder()\n", + " .configure(\n", + " lambda c: c.add_azure_openai_completion_backend(\n", + " \"davinci-002\", \"text-davinci-002\", endpoint, api_key\n", + " ) if (useAzureOpenAI) else c.add_openai_completion_backend(\n", + " \"davinci-002\", \"text-davinci-002\", api_key, org_id\n", + " )\n", + " )\n", + " .configure(\n", + " lambda c: c.add_azure_openai_embeddings_backend(\n", + " \"ada-002\", \"text-embedding-ada-002\", endpoint, api_key\n", + " ) if (useAzureOpenAI) else c.add_open_ai_embeddings_backend(\n", + " \"ada-002\", \"text-embedding-ada-002\", api_key, org_id\n", + " )\n", + " )\n", + " .with_memory_storage(sk.memory.VolatileMemoryStore())\n", + " .build()\n", + " )\n", + " \n", + " return kernel" + ] + }, + { + "cell_type": "markdown", + "id": "e7fefb6a", + "metadata": {}, + "source": [ + "At its core, Semantic Memory is a set of data structures that allow you to store the meaning of text that come from different data sources, and optionally to store the source text too. These texts can be from the web, e-mail providers, chats, a database, or from your local directory, and are hooked up to the Semantic Kernel through data source connectors.\n", + "\n", + "The texts are embedded or compressed into a vector of floats representing mathematically the texts' contents and meaning. You can read more about embeddings [here](https://aka.ms/sk/embeddings)." 
+ ] + }, + { + "cell_type": "markdown", + "id": "2a7e7ca4", + "metadata": {}, + "source": [ + "### Manually adding memories\n", + "Let's create some initial memories \"About Me\". We can add memories to our `VolatileMemoryStore` by using `SaveInformationAsync`" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d096504c", + "metadata": {}, + "outputs": [], + "source": [ + "async def populate_memory(kernel: sk.KernelBase) -> None:\n", + "\n", + " memory_collection_name = \"aboutMe\"\n", + " # Add some documents to the kernel's memory\n", + " await kernel.memory.save_information_async(\n", + " memory_collection_name, id=\"info1\", text=\"My name is Andrea\"\n", + " )\n", + " await kernel.memory.save_information_async(\n", + " memory_collection_name, id=\"info2\", text=\"I currently work as a tour guide\"\n", + " )\n", + " await kernel.memory.save_information_async(\n", + " memory_collection_name, id=\"info3\", text=\"I've been living in Seattle since 2005\"\n", + " )\n", + " await kernel.memory.save_information_async(\n", + " memory_collection_name, id=\"info4\", text=\"I visited France and Italy five times since 2015\"\n", + " )\n", + " await kernel.memory.save_information_async(\n", + " memory_collection_name, id=\"info5\", text=\"My family is from New York\"\n", + " )" + ] + }, + { + "cell_type": "markdown", + "id": "2caf8575", + "metadata": {}, + "source": [ + "Let's try searching the memory:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "628c843e", + "metadata": {}, + "outputs": [], + "source": [ + "async def search_memory_examples(kernel: sk.KernelBase) -> None:\n", + " questions = [\n", + " \"what's my name\",\n", + " \"where do I live?\",\n", + " \"where's my family from?\",\n", + " \"where have I traveled?\",\n", + " \"what do I do for work\",\n", + " ]\n", + "\n", + " for question in questions:\n", + " print(f\"Question: {question}\")\n", + " result = await kernel.memory.search_async(\"aboutMe\", question)\n", 
+ " print(f\"Answer: {result[0].text}\\n\")" + ] + }, + { + "cell_type": "markdown", + "id": "e70c2b22", + "metadata": {}, + "source": [ + "Let's now revisit the our chat sample from the [previous notebook](4-context-variables-chat.ipynb).\n", + "If you remember, we used context variables to fill the prompt with a `history` that continuously got populated as we chatted with the bot. Let's add also memory to it!" + ] + }, + { + "cell_type": "markdown", + "id": "1ed54a32", + "metadata": {}, + "source": [ + "This is done by using the `TextMemorySkill` which exposes the `recall` native function.\n", + "\n", + "`recall` takes an input ask and performs a similarity search on the contents that have\n", + "been embedded in the Memory Store and returns the most relevant memory. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fb8549b2", + "metadata": {}, + "outputs": [], + "source": [ + "async def setup_chat_with_memory(\n", + " kernel: sk.KernelBase,\n", + ") -> Tuple[sk.SKFunctionBase, sk.SKContext]:\n", + " sk_prompt = \"\"\"\n", + " ChatBot can have a conversation with you about any topic.\n", + " It can give explicit instructions or say 'I don't know' if\n", + " it does not have an answer.\n", + "\n", + " Information about me, from previous conversations:\n", + " - {{$fact1}} {{recall $fact1}}\n", + " - {{$fact2}} {{recall $fact2}}\n", + " - {{$fact3}} {{recall $fact3}}\n", + " - {{$fact4}} {{recall $fact4}}\n", + " - {{$fact5}} {{recall $fact5}}\n", + "\n", + " Chat:\n", + " {{$chat_history}}\n", + " User: {{$human_input}}\n", + " ChatBot: \"\"\"\n", + "\n", + " chat_func = create_semantic_function(\n", + " kernel, sk_prompt, max_tokens=200, temperature=0.8\n", + " )\n", + "\n", + " context = kernel.create_new_context()\n", + " context[\"fact1\"] = \"what is my name?\"\n", + " context[\"fact2\"] = \"where do I live?\"\n", + " context[\"fact3\"] = \"where's my family from?\"\n", + " context[\"fact4\"] = \"where have I traveled?\"\n", + " 
context[\"fact5\"] = \"what do I do for work?\"\n", + "\n", + " context[TextMemorySkill.COLLECTION_PARAM] = \"aboutMe\"\n", + " context[TextMemorySkill.RELEVANCE_PARAM] = 0.8\n", + "\n", + " context[\"chat_history\"] = \"\"\n", + "\n", + " return chat_func, context" + ] + }, + { + "cell_type": "markdown", + "id": "1ac62457", + "metadata": {}, + "source": [ + "The `RelevanceParam` is used in memory search and is a measure of the relevance score from 0.0 to 1.0, where 1.0 means a perfect match. We encourage users to experiment with different values." + ] + }, + { + "cell_type": "markdown", + "id": "645b55a1", + "metadata": {}, + "source": [ + "Now that we've included our memories, let's chat!" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "75267a2f", + "metadata": {}, + "outputs": [], + "source": [ + "async def chat(\n", + " kernel: sk.KernelBase, chat_func: sk.SKFunctionBase, context: sk.SKContext\n", + ") -> bool:\n", + " try:\n", + " human_input = input(\"Human:>\")\n", + " context[\"human_input\"] = human_input\n", + " except KeyboardInterrupt:\n", + " print(\"\\n\\nExiting chat...\")\n", + " return False\n", + " except EOFError:\n", + " print(\"\\n\\nExiting chat...\")\n", + " return False\n", + "\n", + " if human_input == \"exit\":\n", + " print(\"\\n\\nExiting chat...\")\n", + " return False\n", + "\n", + " answer = await kernel.run_on_vars_async(context.variables, chat_func)\n", + " context[\"chat_history\"] += f\"\\nHuman:>{human_input}\\nChatBot:>{answer}\\n\"\n", + "\n", + " print(answer)\n", + " return True" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e3875a34", + "metadata": {}, + "outputs": [], + "source": [ + "kernel = build_kernel()\n", + "\n", + "print(\"Populating memory...\")\n", + "await populate_memory(kernel)\n", + "print(\"Asking questions... 
(manually)\")\n", + "await search_memory_examples(kernel)\n", + "print(\"Setting up a chat (with memory!)\")\n", + "chat_func, context = await setup_chat_with_memory(kernel)\n", + "print(\"Begin chatting (type 'exit' to exit):\")\n", + "chatting = True\n", + "while chatting:\n", + " chatting = await chat(kernel, chat_func, context)" + ] + }, + { + "cell_type": "markdown", + "id": "0a51542b", + "metadata": {}, + "source": [ + "# TODO: NEED TO IMPLEMENT EMBEDDING SEARCH\n", + "### Adding documents to your memory\n", + "\n", + "Many times in your applications you'll want to bring in external documents into your memory. Let's see how we can do this using our VolatileMemoryStore.\n", + "\n", + "Let's first get some data using some of the links in the Semantic Kernel repo." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c3d5a1b9", + "metadata": {}, + "outputs": [], + "source": [ + "memory_collection_name = \"SKGitHub\"\n", + "\n", + "github_files ={}\n", + "github_files[\"https://github.com/microsoft/semantic-kernel/blob/main/README.md\"] = \"README: Installation, getting started, and how to contribute\"\n", + "github_files[\"https://github.com/microsoft/semantic-kernel/blob/main/samples/notebooks/dotnet/2-running-prompts-from-file.ipynb\"] = \\\n", + " \"Jupyter notebook describing how to pass prompts from a file to a semantic skill or function\"\n", + "github_files[\"https://github.com/microsoft/semantic-kernel/blob/main/samples/notebooks/dotnet/Getting-Started-Notebook.ipynb\"] = \\\n", + " \"Jupyter notebook describing how to get started with the Semantic Kernel\"\n", + "github_files[\"https://github.com/microsoft/semantic-kernel/tree/main/samples/skills/ChatSkill/ChatGPT\"] = \\\n", + " \"Sample demonstrating how to create a chat skill interfacing with ChatGPT\"\n", + "github_files[\"https://github.com/microsoft/semantic-kernel/blob/main/dotnet/src/SemanticKernel/Memory/Volatile/VolatileMemoryStore.cs\"] = \\\n", + " \"C# class that defines 
a volatile embedding store\"\n", + "github_files[\"https://github.com/microsoft/semantic-kernel/tree/main/samples/dotnet/KernelHttpServer/README.md\"] = \\\n", + " \"README: How to set up a Semantic Kernel Service API using Azure Function Runtime v4\"\n", + "github_files[\"https://github.com/microsoft/semantic-kernel/tree/main/samples/apps/chat-summary-webapp-react/README.md\"] = \\\n", + " \"README: README associated with a sample starter react-based chat summary webapp\"" + ] + }, + { + "cell_type": "markdown", + "id": "a4aa0e5d", + "metadata": {}, + "source": [ + "Let's build a new Kernel." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f6518515", + "metadata": {}, + "outputs": [], + "source": [ + "kernel = build_kernel()" + ] + }, + { + "cell_type": "markdown", + "id": "75f3ea5e", + "metadata": {}, + "source": [ + "Now let's add these files to our VolatileMemoryStore using `SaveReferenceAsync`." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "170e7142", + "metadata": {}, + "outputs": [], + "source": [ + "print(\"Adding some GitHub file URLs and their descriptions to a volatile Semantic Memory.\")\n", + "i = 0\n", + "for entry, value in github_files.items():\n", + " await kernel.memory.save_reference_async(\n", + " collection=memory_collection_name,\n", + " description=value,\n", + " text=value,\n", + " external_id=entry,\n", + " external_source_name=\"GitHub\"\n", + " )\n", + " i += 1\n", + " print(\" URL {} saved\".format(i))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "143911c3", + "metadata": {}, + "outputs": [], + "source": [ + "ask = \"I love Jupyter notebooks, how should I get started?\"\n", + "print(\"===========================\\n\" + \"Query: \" + ask + \"\\n\")\n", + "\n", + "memories = await kernel.memory.search_async(memory_collection_name, ask, limit=5, min_relevance_score=0.77)\n", + "\n", + "i = 0\n", + "for memory in memories:\n", + " i += 1\n", + " print(f\"Result {i}:\")" + " 
print(\" URL : \" + memory.id)\n", + " print(\" Title : \" + memory.description)\n", + " print(\" Relevance: \" + str(memory.relevance))\n", + " print()" + ] + }, + { + "cell_type": "markdown", + "id": "59294dac", + "metadata": {}, + "source": [ + "Now you might be wondering what happens if you have so much data that it doesn't fit into your RAM? That's where you want to make use of an external Vector Database made specifically for storing and retrieving embeddings.\n", + "\n", + "Stay tuned for that!" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.10.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/samples/notebooks/python/init.py b/samples/notebooks/python/init.py new file mode 100644 index 000000000000..e8fcfe7e6943 --- /dev/null +++ b/samples/notebooks/python/init.py @@ -0,0 +1,62 @@ +# Copyright (c) Microsoft. All rights reserved. + +from typing import Optional, Tuple + + +def openai_settings_from_dot_env() -> Tuple[str, Optional[str]]: + """ + Reads the OpenAI API key and organization ID from the .env file. 
+ + Returns: + Tuple[str, Optional[str]]: The OpenAI API key, the OpenAI organization ID + """ + + api_key, org_id = None, None + with open(".env", "r") as f: + lines = f.readlines() + + for line in lines: + if line.startswith("OPENAI_API_KEY"): + parts = line.split("=")[1:] + api_key = "=".join(parts).strip().strip('"') + continue + + if line.startswith("OPENAI_ORG_ID"): + parts = line.split("=")[1:] + org_id = "=".join(parts).strip().strip('"') + continue + + assert api_key is not None, "OpenAI API key not found in .env file" + + # It's okay if the org ID is not found (not required) + return api_key, org_id + + +def azure_openai_settings_from_dot_env() -> Tuple[str, str]: + """ + Reads the Azure OpenAI API key and endpoint from the .env file. + + Returns: + Tuple[str, str]: The Azure OpenAI API key, the endpoint + """ + + api_key, endpoint = None, None + with open(".env", "r") as f: + lines = f.readlines() + + for line in lines: + if line.startswith("AZURE_OPENAI_API_KEY"): + parts = line.split("=")[1:] + api_key = "=".join(parts).strip().strip('"') + continue + + if line.startswith("AZURE_OPENAI_ENDPOINT"): + parts = line.split("=")[1:] + endpoint = "=".join(parts).strip().strip('"') + continue + + # Azure requires both the API key and the endpoint URL. + assert api_key is not None, "Azure OpenAI API key not found in .env file" + assert endpoint is not None, "Azure OpenAI endpoint not found in .env file" + + return api_key, endpoint