diff --git a/.dockerignore b/.dockerignore
index 701807953..b8c1be153 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -65,7 +65,6 @@ htmlcov/
.nox/
.coverage
.coverage.*
-.cache
nosetests.xml
coverage.xml
*.cover
@@ -73,6 +72,11 @@ coverage.xml
.hypothesis/
.pytest_cache/
+# Ignore .cache except calibration
+.cache/*
+!.cache/calibration/
+!.cache/calibration/**
+
# Translations
*.mo
*.pot
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index 4063e395f..2e6fd4490 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -21,7 +21,7 @@ Provide a simple way for the reviewer to try out your changes.
Examples:
```bash
-DATA_DIR=tests/data pytest -sx tests/test_stuff.py::test_something
+pytest -sx tests/test_stuff.py::test_something
```
```bash
python lerobot/scripts/train.py --some.option=true
diff --git a/.github/workflows/nightly-tests.yml b/.github/workflows/nightly-tests.yml
index f967533ae..bbee19a17 100644
--- a/.github/workflows/nightly-tests.yml
+++ b/.github/workflows/nightly-tests.yml
@@ -7,10 +7,8 @@ on:
schedule:
- cron: "0 2 * * *"
-env:
- DATA_DIR: tests/data
+# env:
# SLACK_API_TOKEN: ${{ secrets.SLACK_API_TOKEN }}
-
jobs:
run_all_tests_cpu:
name: CPU
@@ -30,13 +28,9 @@ jobs:
working-directory: /lerobot
steps:
- name: Tests
- env:
- DATA_DIR: tests/data
run: pytest -v --cov=./lerobot --disable-warnings tests
- name: Tests end-to-end
- env:
- DATA_DIR: tests/data
run: make test-end-to-end
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 038b44582..5de071750 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -11,6 +11,7 @@ on:
- ".github/**"
- "poetry.lock"
- "Makefile"
+ - ".cache/**"
push:
branches:
- main
@@ -21,13 +22,13 @@ on:
- ".github/**"
- "poetry.lock"
- "Makefile"
+ - ".cache/**"
jobs:
pytest:
name: Pytest
runs-on: ubuntu-latest
env:
- DATA_DIR: tests/data
MUJOCO_GL: egl
steps:
- uses: actions/checkout@v4
@@ -35,13 +36,17 @@ jobs:
lfs: true # Ensure LFS files are pulled
- name: Install apt dependencies
- run: sudo apt-get update && sudo apt-get install -y libegl1-mesa-dev ffmpeg
+ # portaudio19-dev is needed to install pyaudio
+ run: |
+ sudo apt-get update && \
+ sudo apt-get install -y libegl1-mesa-dev ffmpeg portaudio19-dev
- name: Install poetry
run: |
pipx install poetry && poetry config virtualenvs.in-project true
echo "${{ github.workspace }}/.venv/bin" >> $GITHUB_PATH
+ # TODO(rcadene, aliberts): python 3.12 seems to be used in the tests, not python 3.10
- name: Set up Python 3.10
uses: actions/setup-python@v5
with:
@@ -60,12 +65,10 @@ jobs:
-W ignore::UserWarning:gymnasium.utils.env_checker:247 \
&& rm -rf tests/outputs outputs
-
pytest-minimal:
name: Pytest (minimal install)
runs-on: ubuntu-latest
env:
- DATA_DIR: tests/data
MUJOCO_GL: egl
steps:
- uses: actions/checkout@v4
@@ -80,6 +83,7 @@ jobs:
pipx install poetry && poetry config virtualenvs.in-project true
echo "${{ github.workspace }}/.venv/bin" >> $GITHUB_PATH
+ # TODO(rcadene, aliberts): python 3.12 seems to be used in the tests, not python 3.10
- name: Set up Python 3.10
uses: actions/setup-python@v5
with:
@@ -97,12 +101,11 @@ jobs:
-W ignore::UserWarning:gymnasium.utils.env_checker:247 \
&& rm -rf tests/outputs outputs
-
+ # TODO(aliberts, rcadene): redesign after v2 migration / removing hydra
end-to-end:
name: End-to-end
runs-on: ubuntu-latest
env:
- DATA_DIR: tests/data
MUJOCO_GL: egl
steps:
- uses: actions/checkout@v4
@@ -110,7 +113,10 @@ jobs:
lfs: true # Ensure LFS files are pulled
- name: Install apt dependencies
- run: sudo apt-get update && sudo apt-get install -y libegl1-mesa-dev
+ # portaudio19-dev is needed to install pyaudio
+ run: |
+ sudo apt-get update && \
+ sudo apt-get install -y libegl1-mesa-dev portaudio19-dev
- name: Install poetry
run: |
diff --git a/.gitignore b/.gitignore
index 0e203a394..dfd929b81 100644
--- a/.gitignore
+++ b/.gitignore
@@ -153,3 +153,6 @@ dmypy.json
# Cython debug symbols
cython_debug/
+
+# slurm scripts
+slurm/
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 822197ba2..b8c198568 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -267,7 +267,7 @@ We use `pytest` in order to run the tests. From the root of the
repository, here's how to run tests with `pytest` for the library:
```bash
-DATA_DIR="tests/data" python -m pytest -sv ./tests
+python -m pytest -sv ./tests
```
diff --git a/Makefile b/Makefile
index f6517497e..a95d7614e 100644
--- a/Makefile
+++ b/Makefile
@@ -30,6 +30,8 @@ test-end-to-end:
${MAKE} DEVICE=$(DEVICE) test-tdmpc-ete-eval
${MAKE} DEVICE=$(DEVICE) test-default-ete-eval
${MAKE} DEVICE=$(DEVICE) test-act-pusht-tutorial
+ ${MAKE} DEVICE=$(DEVICE) test-act-ete-train-accelerate-amp
+ ${MAKE} DEVICE=$(DEVICE) test-act-ete-eval-accelerate-amp
test-act-ete-train:
python lerobot/scripts/train.py \
@@ -188,3 +190,31 @@ test-act-pusht-tutorial:
training.image_transforms.enable=true \
hydra.run.dir=tests/outputs/act_pusht/
rm lerobot/configs/policy/created_by_Makefile.yaml
+
+
+test-act-ete-train-accelerate-amp:
+ python -m accelerate.commands.launch --cpu --mixed-precision=fp16 lerobot/scripts/train.py \
+ policy=act \
+ policy.dim_model=64 \
+ env=aloha \
+ wandb.enable=False \
+ training.offline_steps=2 \
+ training.online_steps=0 \
+ eval.n_episodes=1 \
+ eval.batch_size=1 \
+ device=$(DEVICE) \
+ training.save_checkpoint=true \
+ training.save_freq=2 \
+ policy.n_action_steps=20 \
+ policy.chunk_size=20 \
+ training.batch_size=2 \
+ hydra.run.dir=tests/outputs/act_amp/ \
+ training.image_transforms.enable=true
+
+test-act-ete-eval-accelerate-amp:
+ python -m accelerate.commands.launch --cpu --mixed-precision=fp16 lerobot/scripts/eval.py \
+ -p tests/outputs/act_amp/checkpoints/000002/pretrained_model \
+ eval.n_episodes=1 \
+ eval.batch_size=1 \
+ env.episode_length=8 \
+ device=$(DEVICE)
diff --git a/README.md b/README.md
index 703e64881..d749081b7 100644
--- a/README.md
+++ b/README.md
@@ -23,15 +23,15 @@
-
Hot new tutorial: Getting started with real-world robots
+ New robot in town: SO-100
-
-
We just dropped an in-depth tutorial on how to build your own robot!
+
+
We just added a new tutorial on how to build a more affordable robot, at the price of $110 per arm!
Teach it new skills by showing it a few moves with just a laptop.
Then watch your homemade robot act autonomously 🤯
-
For more info, see our thread on X or our tutorial page.
+
Follow the link to the full tutorial for SO-100.
@@ -55,9 +55,9 @@
- |
- |
- |
+ |
+ |
+ |
ACT policy on ALOHA env |
@@ -66,6 +66,11 @@
+### News
+
+* **1-11-2024**: we support the `accelerate` library for distributed training and evaluation on multiple GPUs.
+
+
### Acknowledgment
- Thanks to Tony Zaho, Zipeng Fu and colleagues for open sourcing ACT policy, ALOHA environments and datasets. Ours are adapted from [ALOHA](https://tonyzhaozh.github.io/aloha) and [Mobile ALOHA](https://mobile-aloha.github.io).
@@ -144,7 +149,7 @@ wandb login
### Visualize datasets
-Check out [example 1](./examples/1_load_lerobot_dataset.py) that illustrates how to use our dataset class which automatically download data from the Hugging Face hub.
+Check out [example 1](./examples/1_load_lerobot_dataset.py) that illustrates how to use our dataset class which automatically downloads data from the Hugging Face hub.
You can also locally visualize episodes from a dataset on the hub by executing our script from the command line:
```bash
@@ -153,10 +158,12 @@ python lerobot/scripts/visualize_dataset.py \
--episode-index 0
```
-or from a dataset in a local folder with the root `DATA_DIR` environment variable (in the following case the dataset will be searched for in `./my_local_data_dir/lerobot/pusht`)
+or from a dataset in a local folder with the `root` option and the `--local-files-only` (in the following case the dataset will be searched for in `./my_local_data_dir/lerobot/pusht`)
```bash
-DATA_DIR='./my_local_data_dir' python lerobot/scripts/visualize_dataset.py \
+python lerobot/scripts/visualize_dataset.py \
--repo-id lerobot/pusht \
+ --root ./my_local_data_dir \
+ --local-files-only 1 \
--episode-index 0
```
@@ -208,12 +215,10 @@ dataset attributes:
A `LeRobotDataset` is serialised using several widespread file formats for each of its parts, namely:
- hf_dataset stored using Hugging Face datasets library serialization to parquet
-- videos are stored in mp4 format to save space or png files
-- episode_data_index saved using `safetensor` tensor serialization format
-- stats saved using `safetensor` tensor serialization format
-- info are saved using JSON
+- videos are stored in mp4 format to save space
+- metadata are stored in plain json/jsonl files
-Dataset can be uploaded/downloaded from the HuggingFace hub seamlessly. To work on a local dataset, you can set the `DATA_DIR` environment variable to your root dataset folder as illustrated in the above section on dataset visualization.
+Dataset can be uploaded/downloaded from the HuggingFace hub seamlessly. To work on a local dataset, you can use the `local_files_only` argument and specify its location with the `root` argument if it's not in the default `~/.cache/huggingface/lerobot` location.
### Evaluate a pretrained policy
@@ -280,12 +285,36 @@ To use wandb for logging training and evaluation curves, make sure you've run `w
wandb.enable=true
```
-A link to the wandb logs for the run will also show up in yellow in your terminal. Here is an example of what they look like in your browser. Please also check [here](https://github.com/huggingface/lerobot/blob/main/examples/4_train_policy_with_script.md#typical-logs-and-metrics) for the explaination of some commonly used metrics in logs.
+A link to the wandb logs for the run will also show up in yellow in your terminal. Here is an example of what they look like in your browser. Please also check [here](https://github.com/huggingface/lerobot/blob/main/examples/4_train_policy_with_script.md#typical-logs-and-metrics) for the explanation of some commonly used metrics in logs.
![](media/wandb.png)
Note: For efficiency, during training every checkpoint is evaluated on a low number of episodes. You may use `eval.n_episodes=500` to evaluate on more episodes than the default. Or, after training, you may want to re-evaluate your best checkpoints on more episodes or change the evaluation settings. See `python lerobot/scripts/eval.py --help` for more instructions.
+### Distributed training end evaluation on multiple GPUs/Nodes:
+
+We use the [accelerate](https://huggingface.co/docs/accelerate/basic_tutorials/launch#using-accelerate-launch) library to handle training/evaluating on multiple GPUs/nodes.
+
+To perform distributed training you should use the `python -m accelerate.commands.launch` command. Here’s an example of launching a training script across 2 GPUs :
+
+```bash
+accelerate launch --num_processes=2 lerobot/scripts/train.py \
+ policy=act \
+ env=aloha \
+ env.task=AlohaTransferCube-v0 \
+ dataset_repo_id=lerobot/aloha_sim_transfer_cube_human \
+```
+Check out [example 7](./examples/12_train_policy_accelerate.py)
+
+(Note: Make sure you accelerate is installed otherwise do: `pip install accelerate`)
+
+And to evaluate a policy you can use the following:
+```
+accelerate launch --num_processes=1 --mixed_precision=fp16 lerobot/scripts/eval.py -p lerobot/diffusion_pusht
+```
+
+Note that to reproduce the same results across different GPUs configurations, you should take into account several hyperparameters (explained more [here](https://huggingface.co/docs/accelerate/v1.1.0/en/concept_guides/performance)). In particular, the batch size is multiplied by the number of GPUs, so you should either divide the batch size or the number of training steps by the number of GPUs (be carefull if you are using an lr scheduler).
+
#### Reproduce state-of-the-art (SOTA)
We have organized our configuration files (found under [`lerobot/configs`](./lerobot/configs)) such that they reproduce SOTA results from a given model variant in their respective original works. Simply running:
diff --git a/Tests.ipynb b/Tests.ipynb
new file mode 100644
index 000000000..15ab421e0
--- /dev/null
+++ b/Tests.ipynb
@@ -0,0 +1,796 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "id": "4f42f852-922a-45af-bf82-fbc156f8de76",
+ "metadata": {},
+ "source": [
+ "## Imports"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "id": "454ed22e-0629-4608-a826-67b4760ea4d5",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%load_ext autoreload\n",
+ "%autoreload 2\n",
+ "from pathlib import Path\n",
+ "from pprint import pprint\n",
+ "\n",
+ "import imageio\n",
+ "import torch\n",
+ "\n",
+ "import lerobot\n",
+ "from IPython.display import Video"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "3d73c1af-cfaf-4a9d-a6bf-33c6e45669fd",
+ "metadata": {},
+ "source": [
+ "## Datasets"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "id": "8b49acf4-ad4a-4170-b129-a7e2e8f48958",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "from lerobot.common.datasets.lerobot_dataset import LeRobotDataset\n",
+ "\n",
+ "# print(\"List of available datasets:\")\n",
+ "# pprint(lerobot.available_datasets)\n",
+ "\n",
+ "repo_id = \"lerobot/aloha_sim_insertion_human\"\n",
+ "# You can easily load a dataset from a Hugging Face repository\n",
+ "dataset = LeRobotDataset(repo_id)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 17,
+ "id": "f422e316-ff93-4137-b8f9-090cfac3cbc2",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "LeRobotDataset(\n",
+ " Repository ID: 'lerobot/aloha_sim_insertion_human',\n",
+ " Split: 'train',\n",
+ " Number of Samples: 25000,\n",
+ " Number of Episodes: 50,\n",
+ " Type: video (.mp4),\n",
+ " Recorded Frames per Second: 50,\n",
+ " Camera Keys: ['observation.images.top'],\n",
+ " Video Frame Keys: ['observation.images.top'],\n",
+ " Transformations: None,\n",
+ " Codebase Version: v1.6,\n",
+ ")\n",
+ "Dataset({\n",
+ " features: ['observation.images.top', 'observation.state', 'action', 'episode_index', 'frame_index', 'timestamp', 'next.done', 'index'],\n",
+ " num_rows: 25000\n",
+ "})\n",
+ "\n",
+ "average number of frames per episode: 500.000\n",
+ "frames per second used during data collection: dataset.fps=50\n",
+ "keys to access images from cameras: dataset.camera_keys=['observation.images.top']\n",
+ "\n"
+ ]
+ }
+ ],
+ "source": [
+ "# LeRobotDataset is actually a thin wrapper around an underlying Hugging Face dataset\n",
+ "# (see https://huggingface.co/docs/datasets/index for more information).\n",
+ "print(dataset)\n",
+ "print(dataset.hf_dataset)\n",
+ "\n",
+ "# And provides additional utilities for robotics and compatibility with Pytorch\n",
+ "print(f\"\\naverage number of frames per episode: {dataset.num_samples / dataset.num_episodes:.3f}\")\n",
+ "print(f\"frames per second used during data collection: {dataset.fps=}\")\n",
+ "print(f\"keys to access images from cameras: {dataset.camera_keys=}\\n\")\n",
+ "\n",
+ "# Access frame indexes associated to first episode\n",
+ "episode_index = 0\n",
+ "from_idx = dataset.episode_data_index[\"from\"][episode_index].item()\n",
+ "to_idx = dataset.episode_data_index[\"to\"][episode_index].item()\n",
+ "\n",
+ "# LeRobot datasets actually subclass PyTorch datasets so you can do everything you know and love from working\n",
+ "# with the latter, like iterating through the dataset. Here we grab all the image frames.\n",
+ "frames = [dataset[idx][\"observation.images.top\"] for idx in range(from_idx, to_idx)]\n",
+ "\n",
+ "# Video frames are now float32 in range [0,1] channel first (c,h,w) to follow pytorch convention. To visualize\n",
+ "# them, we convert to uint8 in range [0,255]\n",
+ "frames = [(frame * 255).type(torch.uint8) for frame in frames]\n",
+ "# and to channel last (h,w,c).\n",
+ "frames = [frame.permute((1, 2, 0)).numpy() for frame in frames]"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 21,
+ "id": "155e0586-47ea-4e13-8bb3-36b0ac54c50d",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ ""
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 21,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "from IPython.display import Video\n",
+ "# Finally, we save the frames to a mp4 video for visualization.\n",
+ "dir_path = \"outputs/examples/1_load_lerobot_dataset\"\n",
+ "Path(dir_path).mkdir(parents=True, exist_ok=True)\n",
+ "video_path = dir_path + \"/episode_0.mp4\"\n",
+ "imageio.mimsave(video_path, frames, fps=dataset.fps)\n",
+ "Video(video_path)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 24,
+ "id": "5d75427d-1fc7-408b-937a-a1c998878537",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Fetching 56 files: 100%|████████████████████████████████████████████████████████████████████████████████████████████████████████████| 56/56 [00:00<00:00, 1539.36it/s]\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "dataset[0]['observation.images.top'].shape=torch.Size([4, 3, 480, 640])\n",
+ "dataset[0]['observation.state'].shape=torch.Size([8, 14])\n",
+ "dataset[0]['action'].shape=torch.Size([64, 14])\n",
+ "\n",
+ "batch['observation.images.top'].shape=torch.Size([32, 4, 3, 480, 640])\n",
+ "batch['observation.state'].shape=torch.Size([32, 8, 14])\n",
+ "batch['action'].shape=torch.Size([32, 64, 14])\n"
+ ]
+ }
+ ],
+ "source": [
+ "# For many machine learning applications we need to load the history of past observations or trajectories of\n",
+ "# future actions. Our datasets can load previous and future frames for each key/modality, using timestamps\n",
+ "# differences with the current loaded frame. For instance:\n",
+ "delta_timestamps = {\n",
+ " # loads 4 images: 1 second before current frame, 500 ms before, 200 ms before, and current frame\n",
+ " \"observation.images.top\": [-1, -0.5, -0.20, 0],\n",
+ " # loads 8 state vectors: 1.5 seconds before, 1 second before, ... 20 ms, 10 ms, and current frame\n",
+ " \"observation.state\": [-1.5, -1, -0.5, -0.20, -0.10, -0.02, -0.01, 0],\n",
+ " # loads 64 action vectors: current frame, 1 frame in the future, 2 frames, ... 63 frames in the future\n",
+ " \"action\": [t / dataset.fps for t in range(64)],\n",
+ "}\n",
+ "dataset = LeRobotDataset(repo_id, delta_timestamps=delta_timestamps)\n",
+ "print(f\"\\n{dataset[0]['observation.images.top'].shape=}\") # (4,c,h,w)\n",
+ "print(f\"{dataset[0]['observation.state'].shape=}\") # (8,c)\n",
+ "print(f\"{dataset[0]['action'].shape=}\\n\") # (64,c)\n",
+ "\n",
+ "# Finally, our datasets are fully compatible with PyTorch dataloaders and samplers because they are just\n",
+ "# PyTorch datasets.\n",
+ "dataloader = torch.utils.data.DataLoader(\n",
+ " dataset,\n",
+ " num_workers=0,\n",
+ " batch_size=32,\n",
+ " shuffle=True,\n",
+ ")\n",
+ "for batch in dataloader:\n",
+ " print(f\"{batch['observation.images.top'].shape=}\") # (32,4,c,h,w)\n",
+ " print(f\"{batch['observation.state'].shape=}\") # (32,8,c)\n",
+ " print(f\"{batch['action'].shape=}\") # (32,64,c)\n",
+ " break"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 29,
+ "id": "699cebdb-9edb-45a0-89c8-06085e09f7ca",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "tensor([ 0.0281, -0.5264, 0.8721, -0.0157, 0.4200, -0.0215, 0.1536, -0.0547,\n",
+ " -0.8091, 0.9081, 0.0494, 0.3163, 0.1224, -0.0032])"
+ ]
+ },
+ "execution_count": 29,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "batch['observation.state'][0][0]"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "aa7236e1-53ea-4f6f-96cf-e0f344282c6e",
+ "metadata": {},
+ "source": [
+ "## Training"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "id": "7f4eda23-6f01-4e2a-bf97-3a87d8c2f761",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Generating train split: 25650 examples [00:00, 381978.62 examples/s]\n",
+ "Fetching 212 files: 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████| 212/212 [00:07<00:00, 28.90it/s]\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "step: 0 loss: 1.193\n",
+ "step: 250 loss: 0.067\n",
+ "step: 500 loss: 0.054\n",
+ "step: 750 loss: 0.056\n",
+ "step: 1000 loss: 0.049\n",
+ "step: 1250 loss: 0.057\n",
+ "step: 1500 loss: 0.044\n",
+ "step: 1750 loss: 0.048\n",
+ "step: 2000 loss: 0.050\n",
+ "step: 2250 loss: 0.068\n",
+ "step: 2500 loss: 0.045\n",
+ "step: 2750 loss: 0.025\n",
+ "step: 3000 loss: 0.045\n",
+ "step: 3250 loss: 0.025\n",
+ "step: 3500 loss: 0.040\n",
+ "step: 3750 loss: 0.041\n",
+ "step: 4000 loss: 0.052\n",
+ "step: 4250 loss: 0.034\n",
+ "step: 4500 loss: 0.028\n",
+ "step: 4750 loss: 0.040\n"
+ ]
+ }
+ ],
+ "source": [
+ "from pathlib import Path\n",
+ "\n",
+ "import torch\n",
+ "\n",
+ "from lerobot.common.datasets.lerobot_dataset import LeRobotDataset\n",
+ "from lerobot.common.policies.diffusion.configuration_diffusion import DiffusionConfig\n",
+ "from lerobot.common.policies.diffusion.modeling_diffusion import DiffusionPolicy\n",
+ "\n",
+ "# Create a directory to store the training checkpoint.\n",
+ "output_directory = Path(\"outputs/train/example_pusht_diffusion\")\n",
+ "output_directory.mkdir(parents=True, exist_ok=True)\n",
+ "\n",
+ "# Number of offline training steps (we'll only do offline training for this example.)\n",
+ "# Adjust as you prefer. 5000 steps are needed to get something worth evaluating.\n",
+ "training_steps = 5000\n",
+ "device = torch.device(\"cuda\")\n",
+ "log_freq = 250\n",
+ "\n",
+ "# Set up the dataset.\n",
+ "delta_timestamps = {\n",
+ " # Load the previous image and state at -0.1 seconds before current frame,\n",
+ " # then load current image and state corresponding to 0.0 second.\n",
+ " \"observation.image\": [-0.1, 0.0],\n",
+ " \"observation.state\": [-0.1, 0.0],\n",
+ " # Load the previous action (-0.1), the next action to be executed (0.0),\n",
+ " # and 14 future actions with a 0.1 seconds spacing. All these actions will be\n",
+ " # used to supervise the policy.\n",
+ " \"action\": [-0.1, 0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4],\n",
+ "}\n",
+ "dataset = LeRobotDataset(\"lerobot/pusht\", delta_timestamps=delta_timestamps)\n",
+ "\n",
+ "# Set up the the policy.\n",
+ "# Policies are initialized with a configuration class, in this case `DiffusionConfig`.\n",
+ "# For this example, no arguments need to be passed because the defaults are set up for PushT.\n",
+ "# If you're doing something different, you will likely need to change at least some of the defaults.\n",
+ "cfg = DiffusionConfig()\n",
+ "policy = DiffusionPolicy(cfg, dataset_stats=dataset.stats)\n",
+ "policy.train()\n",
+ "policy.to(device)\n",
+ "\n",
+ "optimizer = torch.optim.Adam(policy.parameters(), lr=1e-4)\n",
+ "\n",
+ "# Create dataloader for offline training.\n",
+ "dataloader = torch.utils.data.DataLoader(\n",
+ " dataset,\n",
+ " num_workers=4,\n",
+ " batch_size=64,\n",
+ " shuffle=True,\n",
+ " pin_memory=device != torch.device(\"cpu\"),\n",
+ " drop_last=True,\n",
+ ")\n",
+ "\n",
+ "# Run training loop.\n",
+ "step = 0\n",
+ "done = False\n",
+ "while not done:\n",
+ " for batch in dataloader:\n",
+ " batch = {k: v.to(device, non_blocking=True) for k, v in batch.items()}\n",
+ " output_dict = policy.forward(batch)\n",
+ " loss = output_dict[\"loss\"]\n",
+ " loss.backward()\n",
+ " optimizer.step()\n",
+ " optimizer.zero_grad()\n",
+ "\n",
+ " if step % log_freq == 0:\n",
+ " print(f\"step: {step} loss: {loss.item():.3f}\")\n",
+ " step += 1\n",
+ " if step >= training_steps:\n",
+ " done = True\n",
+ " break\n",
+ "\n",
+ "# Save a policy checkpoint.\n",
+ "policy.save_pretrained(output_directory)"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "id": "b9ee042e-43de-489c-9ce5-497a04a48653",
+ "metadata": {},
+ "source": [
+ "## Evaluation"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "id": "771c90ad-a094-42ad-ba81-271fa27c1951",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "/data/mshukor/envs/lerobot/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
+ " from .autonotebook import tqdm as notebook_tqdm\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Loading weights from local directory\n",
+ "GPU is available. Device set to: cuda\n",
+ "cuda\n"
+ ]
+ }
+ ],
+ "source": [
+ "from pathlib import Path\n",
+ "\n",
+ "import gym_pusht # noqa: F401\n",
+ "import gymnasium as gym\n",
+ "import imageio\n",
+ "import numpy\n",
+ "import torch\n",
+ "from huggingface_hub import snapshot_download\n",
+ "\n",
+ "from lerobot.common.policies.diffusion.modeling_diffusion import DiffusionPolicy\n",
+ "from lerobot.common.policies.act.modeling_act import ACTPolicy\n",
+ "\n",
+ "\n",
+ "\n",
+ "# Download the diffusion policy for pusht environment\n",
+ "# pretrained_policy_path = Path(snapshot_download(\"lerobot/diffusion_pusht\"))\n",
+ "# OR uncomment the following to evaluate a policy from the local outputs/train folder.\n",
+ "\n",
+ "# Create a directory to store the video of the evaluation\n",
+ "# output_directory = Path(\"outputs/eval/example_pusht_diffusion\")\n",
+ "# output_directory.mkdir(parents=True, exist_ok=True)\n",
+ "# pretrained_policy_path = Path(\"outputs/train/example_pusht_diffusion\")\n",
+ "# policy = DiffusionPolicy.from_pretrained(pretrained_policy_path)\n",
+ "\n",
+ "TASK_NAME = \"lerobot_base_distributed_aloha_transfer_cube_1gpus\"\n",
+ "step = 50000\n",
+ "# Create a directory to store the video of the evaluation\n",
+ "output_directory = Path(f\"outputs/eval/{TASK_NAME}\")\n",
+ "output_directory.mkdir(parents=True, exist_ok=True)\n",
+ "policy_path = f\"/data/mshukor/logs/lerobot/{TASK_NAME}/checkpoints/{step:06d}/pretrained_model\"\n",
+ "pretrained_policy_path = Path(policy_path)\n",
+ "policy = ACTPolicy.from_pretrained(pretrained_policy_path)\n",
+ "\n",
+ "\n",
+ "policy.eval()\n",
+ "\n",
+ "# Check if GPU is available\n",
+ "if torch.cuda.is_available():\n",
+ " device = torch.device(\"cuda\")\n",
+ " print(\"GPU is available. Device set to:\", device)\n",
+ "else:\n",
+ " device = torch.device(\"cpu\")\n",
+ " print(f\"GPU is not available. Device set to: {device}. Inference will be slower than on GPU.\")\n",
+ " # Decrease the number of reverse-diffusion steps (trades off a bit of quality for 10x speed)\n",
+ " policy.diffusion.num_inference_steps = 10\n",
+ "\n",
+ "policy.to(device)\n",
+ "print(device)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "id": "8c9151b7-2298-421c-b63b-d97371875730",
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "import gym_aloha \n",
+ "import gym_xarm \n",
+ "# Initialize evaluation environment to render two observation types:\n",
+ "# an image of the scene and state/position of the agent. The environment\n",
+ "# also automatically stops running after 300 interactions/steps.\n",
+ "# env = gym.make(\n",
+ "# \"gym_pusht/PushT-v0\",\n",
+ "# obs_type=\"pixels_agent_pos\",\n",
+ "# max_episode_steps=300,\n",
+ "# )\n",
+ "\n",
+ "# env = gym.make(\n",
+ "# \"gym_aloha/AlohaTransferCube-v0\",\n",
+ "# obs_type=\"pixels_agent_pos\",\n",
+ "# max_episode_steps=300,\n",
+ "# )\n",
+ "\n",
+ "env = gym.make(\n",
+ " \"gym_xarm/XarmLift-v0\",\n",
+ " obs_type=\"pixels_agent_pos\",\n",
+ " max_episode_steps=300,\n",
+ ")\n",
+ "\n",
+ "# Reset the policy and environmens to prepare for rollout\n",
+ "policy.reset()\n",
+ "numpy_observation, info = env.reset(seed=42)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "id": "bdebf0bd-46d5-4d53-ad56-61edabd41ed8",
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "step=0 reward=0.0 terminated=False\n",
+ "step=1 reward=0.0 terminated=False\n",
+ "step=2 reward=0.0 terminated=False\n",
+ "step=3 reward=0.0 terminated=False\n",
+ "step=4 reward=0.0 terminated=False\n",
+ "step=5 reward=0.0 terminated=False\n",
+ "step=6 reward=0.0 terminated=False\n",
+ "step=7 reward=0.0 terminated=False\n",
+ "step=8 reward=0.0 terminated=False\n",
+ "step=9 reward=0.0 terminated=False\n",
+ "step=10 reward=0.0 terminated=False\n",
+ "step=11 reward=0.0 terminated=False\n",
+ "step=12 reward=0.0 terminated=False\n",
+ "step=13 reward=0.0 terminated=False\n",
+ "step=14 reward=0.0 terminated=False\n",
+ "step=15 reward=0.0 terminated=False\n",
+ "step=16 reward=0.0 terminated=False\n",
+ "step=17 reward=0.0 terminated=False\n",
+ "step=18 reward=0.0 terminated=False\n",
+ "step=19 reward=0.0 terminated=False\n",
+ "step=20 reward=0.0 terminated=False\n",
+ "step=21 reward=0.0 terminated=False\n",
+ "step=22 reward=0.0 terminated=False\n",
+ "step=23 reward=0.0 terminated=False\n",
+ "step=24 reward=0.0 terminated=False\n",
+ "step=25 reward=0.0 terminated=False\n",
+ "step=26 reward=0.0 terminated=False\n",
+ "step=27 reward=0.0 terminated=False\n",
+ "step=28 reward=0.0 terminated=False\n",
+ "step=29 reward=0.0 terminated=False\n",
+ "step=30 reward=0.0 terminated=False\n",
+ "step=31 reward=0.0 terminated=False\n",
+ "step=32 reward=0.0 terminated=False\n",
+ "step=33 reward=0.0 terminated=False\n",
+ "step=34 reward=0.0 terminated=False\n",
+ "step=35 reward=0.0060973941747585235 terminated=False\n",
+ "step=36 reward=0.02622279916580214 terminated=False\n",
+ "step=37 reward=0.05121513810520591 terminated=False\n",
+ "step=38 reward=0.07973351723518816 terminated=False\n",
+ "step=39 reward=0.1055264807998946 terminated=False\n",
+ "step=40 reward=0.11397255143592261 terminated=False\n",
+ "step=41 reward=0.1183477905742287 terminated=False\n",
+ "step=42 reward=0.12188789988102575 terminated=False\n",
+ "step=43 reward=0.12545805998888346 terminated=False\n",
+ "step=44 reward=0.1288567941022673 terminated=False\n",
+ "step=45 reward=0.13151607159645246 terminated=False\n",
+ "step=46 reward=0.1334215324045937 terminated=False\n",
+ "step=47 reward=0.13462679837021455 terminated=False\n",
+ "step=48 reward=0.13541090727032962 terminated=False\n",
+ "step=49 reward=0.13590309886928334 terminated=False\n",
+ "step=50 reward=0.13633914976509004 terminated=False\n",
+ "step=51 reward=0.13675125461247126 terminated=False\n",
+ "step=52 reward=0.13710415750492036 terminated=False\n",
+ "step=53 reward=0.1373384382180125 terminated=False\n",
+ "step=54 reward=0.13744056010263833 terminated=False\n",
+ "step=55 reward=0.1373603108492143 terminated=False\n",
+ "step=56 reward=0.13713720481440303 terminated=False\n",
+ "step=57 reward=0.13711259085950156 terminated=False\n",
+ "step=58 reward=0.13711259085950156 terminated=False\n",
+ "step=59 reward=0.13711259085950156 terminated=False\n",
+ "step=60 reward=0.13711259085950156 terminated=False\n",
+ "step=61 reward=0.13711259085950156 terminated=False\n",
+ "step=62 reward=0.13711259085950156 terminated=False\n",
+ "step=63 reward=0.13711259085950156 terminated=False\n",
+ "step=64 reward=0.13711259085950156 terminated=False\n",
+ "step=65 reward=0.13711259085950156 terminated=False\n",
+ "step=66 reward=0.13711259085950156 terminated=False\n",
+ "step=67 reward=0.13711259085950156 terminated=False\n",
+ "step=68 reward=0.13711259085950156 terminated=False\n",
+ "step=69 reward=0.13711259085950156 terminated=False\n",
+ "step=70 reward=0.13711259085950156 terminated=False\n",
+ "step=71 reward=0.13711259085950156 terminated=False\n",
+ "step=72 reward=0.13711259085950156 terminated=False\n",
+ "step=73 reward=0.13711259085950156 terminated=False\n",
+ "step=74 reward=0.13711259085950156 terminated=False\n",
+ "step=75 reward=0.13711259085950156 terminated=False\n",
+ "step=76 reward=0.13711259085950156 terminated=False\n",
+ "step=77 reward=0.13711259085950156 terminated=False\n",
+ "step=78 reward=0.13711259085950156 terminated=False\n",
+ "step=79 reward=0.13711259085950156 terminated=False\n",
+ "step=80 reward=0.14165259163772553 terminated=False\n",
+ "step=81 reward=0.16254716720209392 terminated=False\n",
+ "step=82 reward=0.18038377158048674 terminated=False\n",
+ "step=83 reward=0.1955349503035339 terminated=False\n",
+ "step=84 reward=0.209590275273389 terminated=False\n",
+ "step=85 reward=0.22297886936917977 terminated=False\n",
+ "step=86 reward=0.2392411144470952 terminated=False\n",
+ "step=87 reward=0.2580199500579908 terminated=False\n",
+ "step=88 reward=0.2777329773794388 terminated=False\n",
+ "step=89 reward=0.29761240390900523 terminated=False\n",
+ "step=90 reward=0.31716465663195625 terminated=False\n",
+ "step=91 reward=0.3355213586040695 terminated=False\n",
+ "step=92 reward=0.3524998804211362 terminated=False\n",
+ "step=93 reward=0.36810918114197516 terminated=False\n",
+ "step=94 reward=0.38014989945536126 terminated=False\n",
+ "step=95 reward=0.3905574417126761 terminated=False\n",
+ "step=96 reward=0.4002848213657343 terminated=False\n",
+ "step=97 reward=0.4170736338625133 terminated=False\n",
+ "step=98 reward=0.44908708220841653 terminated=False\n",
+ "step=99 reward=0.49331303205450905 terminated=False\n",
+ "step=100 reward=0.5554300692787169 terminated=False\n",
+ "step=101 reward=0.6250862544059551 terminated=False\n",
+ "step=102 reward=0.6893315351179427 terminated=False\n",
+ "step=103 reward=0.7439593776734601 terminated=False\n",
+ "step=104 reward=0.7813831447770964 terminated=False\n",
+ "step=105 reward=0.8090518882538443 terminated=False\n",
+ "step=106 reward=0.8280920189882142 terminated=False\n",
+ "step=107 reward=0.8437796249119314 terminated=False\n",
+ "step=108 reward=0.8639611941043245 terminated=False\n",
+ "step=109 reward=0.8906448932017331 terminated=False\n",
+ "step=110 reward=0.917804351543538 terminated=False\n",
+ "step=111 reward=0.9421491802064923 terminated=False\n",
+ "step=112 reward=0.9619641827581312 terminated=False\n",
+ "step=113 reward=0.9752323060631967 terminated=False\n",
+ "step=114 reward=0.9769516793273336 terminated=False\n",
+ "step=115 reward=0.9769516793273336 terminated=False\n",
+ "step=116 reward=0.9769516793273336 terminated=False\n",
+ "step=117 reward=0.9791212865711658 terminated=False\n",
+ "step=118 reward=0.9821981164893177 terminated=False\n",
+ "step=119 reward=0.9825801568345679 terminated=False\n",
+ "step=120 reward=0.9825801568345679 terminated=False\n",
+ "step=121 reward=0.9825801568345679 terminated=False\n",
+ "step=122 reward=0.9825801568345679 terminated=False\n",
+ "step=123 reward=0.9825801568345679 terminated=False\n",
+ "step=124 reward=0.9825801568345679 terminated=False\n",
+ "step=125 reward=0.9825801568345679 terminated=False\n",
+ "step=126 reward=0.9825801568345679 terminated=False\n",
+ "step=127 reward=0.9825801568345679 terminated=False\n",
+ "step=128 reward=0.9825801568345679 terminated=False\n",
+ "step=129 reward=0.9825801568345679 terminated=False\n",
+ "step=130 reward=0.9825801568345679 terminated=False\n",
+ "step=131 reward=0.9825801568345679 terminated=False\n",
+ "step=132 reward=0.9825801568345679 terminated=False\n",
+ "step=133 reward=0.9825801568345679 terminated=False\n",
+ "step=134 reward=0.9825801568345679 terminated=False\n",
+ "step=135 reward=0.9825801568345679 terminated=False\n",
+ "step=136 reward=0.9825801568345679 terminated=False\n",
+ "step=137 reward=0.9825801568345679 terminated=False\n",
+ "step=138 reward=0.9825801568345679 terminated=False\n",
+ "step=139 reward=0.9825801568345679 terminated=False\n",
+ "step=140 reward=0.9864498413092376 terminated=False\n",
+ "step=141 reward=0.9976040425707386 terminated=False\n",
+ "step=142 reward=1.0 terminated=True\n",
+ "Success!\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "IMAGEIO FFMPEG_WRITER WARNING: input image is not divisible by macro_block_size=16, resizing from (680, 680) to (688, 688) to ensure video compatibility with most codecs and players. To prevent resizing, make your input image divisible by the macro_block_size or set the macro_block_size to 1 (risking incompatibility).\n",
+ "[swscaler @ 0x5a7cec0] Warning: data is not aligned! This can lead to a speed loss\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Video of the evaluation is available in 'outputs/eval/example_pusht_diffusion/rollout.mp4'.\n"
+ ]
+ },
+ {
+ "ename": "NameError",
+ "evalue": "name 'Video' is not defined",
+ "output_type": "error",
+ "traceback": [
+ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+ "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
+ "Cell \u001b[0;32mIn[4], line 69\u001b[0m\n\u001b[1;32m 66\u001b[0m imageio\u001b[38;5;241m.\u001b[39mmimsave(\u001b[38;5;28mstr\u001b[39m(video_path), numpy\u001b[38;5;241m.\u001b[39mstack(frames), fps\u001b[38;5;241m=\u001b[39mfps)\n\u001b[1;32m 68\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mVideo of the evaluation is available in \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mvideo_path\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m.\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m---> 69\u001b[0m \u001b[43mVideo\u001b[49m(video_path)\n",
+ "\u001b[0;31mNameError\u001b[0m: name 'Video' is not defined"
+ ]
+ }
+ ],
+ "source": [
+ "# Prepare to collect every rewards and all the frames of the episode,\n",
+ "# from initial state to final state.\n",
+ "rewards = []\n",
+ "frames = []\n",
+ "\n",
+ "# Render frame of the initial state\n",
+ "frames.append(env.render())\n",
+ "\n",
+ "step = 0\n",
+ "done = False\n",
+ "while not done:\n",
+ " # Prepare observation for the policy running in Pytorch\n",
+ " state = torch.from_numpy(numpy_observation[\"agent_pos\"])\n",
+ " image = torch.from_numpy(numpy_observation[\"pixels\"])\n",
+ "\n",
+ " # Convert to float32 with image from channel first in [0,255]\n",
+ " # to channel last in [0,1]\n",
+ " state = state.to(torch.float32)\n",
+ " image = image.to(torch.float32) / 255\n",
+ " image = image.permute(2, 0, 1)\n",
+ "\n",
+ " # Send data tensors from CPU to GPU\n",
+ " state = state.to(device, non_blocking=True)\n",
+ " image = image.to(device, non_blocking=True)\n",
+ "\n",
+ " # Add extra (empty) batch dimension, required to forward the policy\n",
+ " state = state.unsqueeze(0)\n",
+ " image = image.unsqueeze(0)\n",
+ "\n",
+ " # Create the policy input dictionary\n",
+ " observation = {\n",
+ " \"observation.state\": state,\n",
+ " \"observation.image\": image,\n",
+ " }\n",
+ "\n",
+ " # Predict the next action with respect to the current observation\n",
+ " with torch.inference_mode():\n",
+ " action = policy.select_action(observation)\n",
+ "\n",
+ " # Prepare the action for the environment\n",
+ " numpy_action = action.squeeze(0).to(\"cpu\").numpy()\n",
+ "\n",
+ " # Step through the environment and receive a new observation\n",
+ " numpy_observation, reward, terminated, truncated, info = env.step(numpy_action)\n",
+ " print(f\"{step=} {reward=} {terminated=}\")\n",
+ "\n",
+ " # Keep track of all the rewards and frames\n",
+ " rewards.append(reward)\n",
+ " frames.append(env.render())\n",
+ "\n",
+ " # The rollout is considered done when the success state is reach (i.e. terminated is True),\n",
+ " # or the maximum number of iterations is reached (i.e. truncated is True)\n",
+ " done = terminated | truncated | done\n",
+ " step += 1\n",
+ "\n",
+ "if terminated:\n",
+ " print(\"Success!\")\n",
+ "else:\n",
+ " print(\"Failure!\")\n",
+ "\n",
+ "# Get the speed of environment (i.e. its number of frames per second).\n",
+ "fps = env.metadata[\"render_fps\"]\n",
+ "\n",
+ "# Encode all frames into a mp4 video.\n",
+ "video_path = output_directory / \"rollout.mp4\"\n",
+ "imageio.mimsave(str(video_path), numpy.stack(frames), fps=fps)\n",
+ "\n",
+ "print(f\"Video of the evaluation is available in '{video_path}'.\")"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "id": "5dd62065-1ac1-4cff-9fd6-236d490b3808",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ ""
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 6,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "Video(video_path)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "id": "f687655a-a892-4864-9bb6-8e396d8dd72c",
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "torch.Size([1, 3, 96, 96])"
+ ]
+ },
+ "execution_count": 9,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "observation['observation.image'].shape"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "lerobot",
+ "language": "python",
+ "name": "lerobot"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.15"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
diff --git a/benchmarks/video/run_video_benchmark.py b/benchmarks/video/run_video_benchmark.py
index 46806c075..e90664872 100644
--- a/benchmarks/video/run_video_benchmark.py
+++ b/benchmarks/video/run_video_benchmark.py
@@ -266,7 +266,7 @@ def benchmark_encoding_decoding(
)
ep_num_images = dataset.episode_data_index["to"][0].item()
- width, height = tuple(dataset[0][dataset.camera_keys[0]].shape[-2:])
+ width, height = tuple(dataset[0][dataset.meta.camera_keys[0]].shape[-2:])
num_pixels = width * height
video_size_bytes = video_path.stat().st_size
images_size_bytes = get_directory_size(imgs_dir)
diff --git a/docker/lerobot-cpu/Dockerfile b/docker/lerobot-cpu/Dockerfile
index 34f5361a8..707a6c65b 100644
--- a/docker/lerobot-cpu/Dockerfile
+++ b/docker/lerobot-cpu/Dockerfile
@@ -22,7 +22,7 @@ RUN echo "source /opt/venv/bin/activate" >> /root/.bashrc
COPY . /lerobot
WORKDIR /lerobot
RUN pip install --upgrade --no-cache-dir pip
-RUN pip install --no-cache-dir ".[test, aloha, xarm, pusht, dynamixel]" \
+RUN pip install --no-cache-dir ".[test, aloha, xarm, pusht, dynamixel, accelerate]" \
--extra-index-url https://download.pytorch.org/whl/cpu
# Set EGL as the rendering backend for MuJoCo
diff --git a/docker/lerobot-gpu/Dockerfile b/docker/lerobot-gpu/Dockerfile
index 92640cf4b..6b9429b77 100644
--- a/docker/lerobot-gpu/Dockerfile
+++ b/docker/lerobot-gpu/Dockerfile
@@ -24,7 +24,7 @@ RUN echo "source /opt/venv/bin/activate" >> /root/.bashrc
COPY . /lerobot
WORKDIR /lerobot
RUN pip install --upgrade --no-cache-dir pip
-RUN pip install --no-cache-dir ".[test, aloha, xarm, pusht, dynamixel]"
+RUN pip install --no-cache-dir ".[test, aloha, xarm, pusht, dynamixel, accelerate]"
# Set EGL as the rendering backend for MuJoCo
ENV MUJOCO_GL="egl"
diff --git a/examples/10_use_so100.md b/examples/10_use_so100.md
new file mode 100644
index 000000000..70e4ed8ba
--- /dev/null
+++ b/examples/10_use_so100.md
@@ -0,0 +1,275 @@
+This tutorial explains how to use [SO-100](https://github.com/TheRobotStudio/SO-ARM100) with LeRobot.
+
+## Source the parts
+
+Follow this [README](https://github.com/TheRobotStudio/SO-ARM100). It contains the bill of materials, with link to source the parts, as well as the instructions to 3D print the parts, and advices if it's your first time printing or if you don't own a 3D printer already.
+
+**Important**: Before assembling, you will first need to configure your motors. To this end, we provide a nice script, so let's first install LeRobot. After configuration, we will also guide you through assembly.
+
+## Install LeRobot
+
+On your computer:
+
+1. [Install Miniconda](https://docs.anaconda.com/miniconda/#quick-command-line-install):
+```bash
+mkdir -p ~/miniconda3
+wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda3/miniconda.sh
+bash ~/miniconda3/miniconda.sh -b -u -p ~/miniconda3
+rm ~/miniconda3/miniconda.sh
+~/miniconda3/bin/conda init bash
+```
+
+2. Restart shell or `source ~/.bashrc`
+
+3. Create and activate a fresh conda environment for lerobot
+```bash
+conda create -y -n lerobot python=3.10 && conda activate lerobot
+```
+
+4. Clone LeRobot:
+```bash
+git clone https://github.com/huggingface/lerobot.git ~/lerobot
+```
+
+5. Install LeRobot with dependencies for the feetech motors:
+```bash
+cd ~/lerobot && pip install -e ".[feetech]"
+```
+
+For Linux only (not Mac), install extra dependencies for recording datasets:
+```bash
+conda install -y -c conda-forge ffmpeg
+pip uninstall -y opencv-python
+conda install -y -c conda-forge "opencv>=4.10.0"
+```
+
+## Configure the motors
+
+Follow steps 1 of the [assembly video](https://www.youtube.com/watch?v=FioA2oeFZ5I) which illustrates the use of our scripts below.
+
+**Find USB ports associated to your arms**
+To find the correct ports for each arm, run the utility script twice:
+```bash
+python lerobot/scripts/find_motors_bus_port.py
+```
+
+Example output when identifying the leader arm's port (e.g., `/dev/tty.usbmodem575E0031751` on Mac, or possibly `/dev/ttyACM0` on Linux):
+```
+Finding all available ports for the MotorBus.
+['/dev/tty.usbmodem575E0032081', '/dev/tty.usbmodem575E0031751']
+Remove the usb cable from your DynamixelMotorsBus and press Enter when done.
+
+[...Disconnect leader arm and press Enter...]
+
+The port of this DynamixelMotorsBus is /dev/tty.usbmodem575E0031751
+Reconnect the usb cable.
+```
+
+Example output when identifying the follower arm's port (e.g., `/dev/tty.usbmodem575E0032081`, or possibly `/dev/ttyACM1` on Linux):
+```
+Finding all available ports for the MotorBus.
+['/dev/tty.usbmodem575E0032081', '/dev/tty.usbmodem575E0031751']
+Remove the usb cable from your DynamixelMotorsBus and press Enter when done.
+
+[...Disconnect follower arm and press Enter...]
+
+The port of this DynamixelMotorsBus is /dev/tty.usbmodem575E0032081
+Reconnect the usb cable.
+```
+
+Troubleshooting: On Linux, you might need to give access to the USB ports by running:
+```bash
+sudo chmod 666 /dev/ttyACM0
+sudo chmod 666 /dev/ttyACM1
+```
+
+**Configure your motors**
+Plug your first motor and run this script to set its ID to 1. It will also set its present position to 2048, so expect your motor to rotate:
+```bash
+python lerobot/scripts/configure_motor.py \
+ --port /dev/tty.usbmodem58760432961 \
+ --brand feetech \
+ --model sts3215 \
+ --baudrate 1000000 \
+ --ID 1
+```
+
+Note: These motors are currently limitated. They can take values between 0 and 4096 only, which corresponds to a full turn. They can't turn more than that. 2048 is at the middle of this range, so we can take -2048 steps (180 degrees anticlockwise) and reach the maximum range, or take +2048 steps (180 degrees clockwise) and reach the maximum range. The configuration step also sets the homing offset to 0, so that if you misassembled the arm, you can always update the homing offset to account for a shift up to ± 2048 steps (± 180 degrees).
+
+Then unplug your motor and plug the second motor and set its ID to 2.
+```bash
+python lerobot/scripts/configure_motor.py \
+ --port /dev/tty.usbmodem58760432961 \
+ --brand feetech \
+ --model sts3215 \
+ --baudrate 1000000 \
+ --ID 2
+```
+
+Redo the process for all your motors until ID 6. Do the same for the 6 motors of the leader arm.
+
+**Remove the gears of the 6 leader motors**
+Follow step 2 of the [assembly video](https://www.youtube.com/watch?v=FioA2oeFZ5I). You need to remove the gear for the motors of the leader arm. As a result, you will only use the position encoding of the motor and reduce friction to more easily operate the leader arm.
+
+**Add motor horn to the motors**
+Follow step 3 of the [assembly video](https://www.youtube.com/watch?v=FioA2oeFZ5I). For SO-100, you need to align the holes on the motor horn to the motor spline to be approximately 1:30, 4:30, 7:30 and 10:30.
+Try to avoid rotating the motor while doing so to keep position 2048 set during configuration. It is especially tricky for the leader motors as it is more sensible without the gears, but it's ok if it's a bit rotated.
+
+## Assemble the arms
+
+Follow step 4 of the [assembly video](https://www.youtube.com/watch?v=FioA2oeFZ5I). The first arm should take a bit more than 1 hour to assemble, but once you get use to it, you can do it under 1 hour for the second arm.
+
+## Calibrate
+
+Next, you'll need to calibrate your SO-100 robot to ensure that the leader and follower arms have the same position values when they are in the same physical position. This calibration is essential because it allows a neural network trained on one SO-100 robot to work on another.
+
+**Manual calibration of follower arm**
+/!\ Contrarily to step 6 of the [assembly video](https://www.youtube.com/watch?v=FioA2oeFZ5I) which illustrates the auto calibration, we will actually do manual calibration of follower for now.
+
+You will need to move the follower arm to these positions sequentially:
+
+| 1. Zero position | 2. Rotated position | 3. Rest position |
+|---|---|---|
+| | | |
+
+Make sure both arms are connected and run this script to launch manual calibration:
+```bash
+python lerobot/scripts/control_robot.py calibrate \
+ --robot-path lerobot/configs/robot/so100.yaml \
+ --robot-overrides '~cameras' --arms main_follower
+```
+
+**Manual calibration of leader arm**
+Follow step 6 of the [assembly video](https://www.youtube.com/watch?v=FioA2oeFZ5I) which illustrates the manual calibration. You will need to move the leader arm to these positions sequentially:
+
+| 1. Zero position | 2. Rotated position | 3. Rest position |
+|---|---|---|
+| | | |
+
+Run this script to launch manual calibration:
+```bash
+python lerobot/scripts/control_robot.py calibrate \
+ --robot-path lerobot/configs/robot/so100.yaml \
+ --robot-overrides '~cameras' --arms main_leader
+```
+
+## Teleoperate
+
+**Simple teleop**
+Then you are ready to teleoperate your robot! Run this simple script (it won't connect and display the cameras):
+```bash
+python lerobot/scripts/control_robot.py teleoperate \
+ --robot-path lerobot/configs/robot/so100.yaml \
+ --robot-overrides '~cameras' \
+ --display-cameras 0
+```
+
+
+**Teleop with displaying cameras**
+Follow [this guide to setup your cameras](https://github.com/huggingface/lerobot/blob/main/examples/7_get_started_with_real_robot.md#c-add-your-cameras-with-opencvcamera). Then you will be able to display the cameras on your computer while you are teleoperating by running the following code. This is useful to prepare your setup before recording your first dataset.
+```bash
+python lerobot/scripts/control_robot.py teleoperate \
+ --robot-path lerobot/configs/robot/so100.yaml
+```
+
+## Record a dataset
+
+Once you're familiar with teleoperation, you can record your first dataset with SO-100.
+
+If you want to use the Hugging Face hub features for uploading your dataset and you haven't previously done it, make sure you've logged in using a write-access token, which can be generated from the [Hugging Face settings](https://huggingface.co/settings/tokens):
+```bash
+huggingface-cli login --token ${HUGGINGFACE_TOKEN} --add-to-git-credential
+```
+
+Store your Hugging Face repository name in a variable to run these commands:
+```bash
+HF_USER=$(huggingface-cli whoami | head -n 1)
+echo $HF_USER
+```
+
+Record 2 episodes and upload your dataset to the hub:
+```bash
+python lerobot/scripts/control_robot.py record \
+ --robot-path lerobot/configs/robot/so100.yaml \
+ --fps 30 \
+ --repo-id ${HF_USER}/so100_test \
+ --tags so100 tutorial \
+ --warmup-time-s 5 \
+ --episode-time-s 40 \
+ --reset-time-s 10 \
+ --num-episodes 2 \
+ --push-to-hub 1
+```
+
+## Visualize a dataset
+
+If you uploaded your dataset to the hub with `--push-to-hub 1`, you can [visualize your dataset online](https://huggingface.co/spaces/lerobot/visualize_dataset) by copy pasting your repo id given by:
+```bash
+echo ${HF_USER}/so100_test
+```
+
+If you didn't upload with `--push-to-hub 0`, you can also visualize it locally with:
+```bash
+python lerobot/scripts/visualize_dataset_html.py \
+ --repo-id ${HF_USER}/so100_test
+```
+
+## Replay an episode
+
+Now try to replay the first episode on your robot:
+```bash
+python lerobot/scripts/control_robot.py replay \
+ --robot-path lerobot/configs/robot/so100.yaml \
+ --fps 30 \
+ --repo-id ${HF_USER}/so100_test \
+ --episode 0
+```
+
+## Train a policy
+
+To train a policy to control your robot, use the [`python lerobot/scripts/train.py`](../lerobot/scripts/train.py) script. A few arguments are required. Here is an example command:
+```bash
+python lerobot/scripts/train.py \
+ dataset_repo_id=${HF_USER}/so100_test \
+ policy=act_so100_real \
+ env=so100_real \
+ hydra.run.dir=outputs/train/act_so100_test \
+ hydra.job.name=act_so100_test \
+ device=cuda \
+ wandb.enable=true
+```
+
+Let's explain it:
+1. We provided the dataset as argument with `dataset_repo_id=${HF_USER}/so100_test`.
+2. We provided the policy with `policy=act_so100_real`. This loads configurations from [`lerobot/configs/policy/act_so100_real.yaml`](../lerobot/configs/policy/act_so100_real.yaml). Importantly, this policy uses 2 cameras as input `laptop`, `phone`.
+3. We provided an environment as argument with `env=so100_real`. This loads configurations from [`lerobot/configs/env/so100_real.yaml`](../lerobot/configs/env/so100_real.yaml).
+4. We provided `device=cuda` since we are training on a Nvidia GPU, but you can also use `device=mps` if you are using a Mac with Apple silicon, or `device=cpu` otherwise.
+5. We provided `wandb.enable=true` to use [Weights and Biases](https://docs.wandb.ai/quickstart) for visualizing training plots. This is optional but if you use it, make sure you are logged in by running `wandb login`.
+
+Training should take several hours. You will find checkpoints in `outputs/train/act_so100_test/checkpoints`.
+
+## Evaluate your policy
+
+You can use the `record` function from [`lerobot/scripts/control_robot.py`](../lerobot/scripts/control_robot.py) but with a policy checkpoint as input. For instance, run this command to record 10 evaluation episodes:
+```bash
+python lerobot/scripts/control_robot.py record \
+ --robot-path lerobot/configs/robot/so100.yaml \
+ --fps 30 \
+ --repo-id ${HF_USER}/eval_act_so100_test \
+ --tags so100 tutorial eval \
+ --warmup-time-s 5 \
+ --episode-time-s 40 \
+ --reset-time-s 10 \
+ --num-episodes 10 \
+ -p outputs/train/act_so100_test/checkpoints/last/pretrained_model
+```
+
+As you can see, it's almost the same command as previously used to record your training dataset. Two things changed:
+1. There is an additional `-p` argument which indicates the path to your policy checkpoint with (e.g. `-p outputs/train/eval_so100_test/checkpoints/last/pretrained_model`). You can also use the model repository if you uploaded a model checkpoint to the hub (e.g. `-p ${HF_USER}/act_so100_test`).
+2. The name of dataset begins by `eval` to reflect that you are running inference (e.g. `--repo-id ${HF_USER}/eval_act_so100_test`).
+
+## More
+
+Follow this [previous tutorial](https://github.com/huggingface/lerobot/blob/main/examples/7_get_started_with_real_robot.md#4-train-a-policy-on-your-data) for a more in-depth tutorial on controlling real robots with LeRobot.
+
+If you have any question or need help, please reach out on Discord in the channel [`#so100-arm`](https://discord.com/channels/1216765309076115607/1237741463832363039).
diff --git a/examples/11_use_moss.md b/examples/11_use_moss.md
new file mode 100644
index 000000000..55d6fcaf9
--- /dev/null
+++ b/examples/11_use_moss.md
@@ -0,0 +1,275 @@
+This tutorial explains how to use [Moss v1](https://github.com/jess-moss/moss-robot-arms) with LeRobot.
+
+## Source the parts
+
+Follow this [README](https://github.com/jess-moss/moss-robot-arms). It contains the bill of materials, with link to source the parts, as well as the instructions to 3D print the parts, and advices if it's your first time printing or if you don't own a 3D printer already.
+
+**Important**: Before assembling, you will first need to configure your motors. To this end, we provide a nice script, so let's first install LeRobot. After configuration, we will also guide you through assembly.
+
+## Install LeRobot
+
+On your computer:
+
+1. [Install Miniconda](https://docs.anaconda.com/miniconda/#quick-command-line-install):
+```bash
+mkdir -p ~/miniconda3
+wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda3/miniconda.sh
+bash ~/miniconda3/miniconda.sh -b -u -p ~/miniconda3
+rm ~/miniconda3/miniconda.sh
+~/miniconda3/bin/conda init bash
+```
+
+2. Restart shell or `source ~/.bashrc`
+
+3. Create and activate a fresh conda environment for lerobot
+```bash
+conda create -y -n lerobot python=3.10 && conda activate lerobot
+```
+
+4. Clone LeRobot:
+```bash
+git clone https://github.com/huggingface/lerobot.git ~/lerobot
+```
+
+5. Install LeRobot with dependencies for the feetech motors:
+```bash
+cd ~/lerobot && pip install -e ".[feetech]"
+```
+
+For Linux only (not Mac), install extra dependencies for recording datasets:
+```bash
+conda install -y -c conda-forge ffmpeg
+pip uninstall -y opencv-python
+conda install -y -c conda-forge "opencv>=4.10.0"
+```
+
+## Configure the motors
+
+Follow steps 1 of the [assembly video](https://www.youtube.com/watch?v=DA91NJOtMic) which illustrates the use of our scripts below.
+
+**Find USB ports associated to your arms**
+To find the correct ports for each arm, run the utility script twice:
+```bash
+python lerobot/scripts/find_motors_bus_port.py
+```
+
+Example output when identifying the leader arm's port (e.g., `/dev/tty.usbmodem575E0031751` on Mac, or possibly `/dev/ttyACM0` on Linux):
+```
+Finding all available ports for the MotorBus.
+['/dev/tty.usbmodem575E0032081', '/dev/tty.usbmodem575E0031751']
+Remove the usb cable from your DynamixelMotorsBus and press Enter when done.
+
+[...Disconnect leader arm and press Enter...]
+
+The port of this DynamixelMotorsBus is /dev/tty.usbmodem575E0031751
+Reconnect the usb cable.
+```
+
+Example output when identifying the follower arm's port (e.g., `/dev/tty.usbmodem575E0032081`, or possibly `/dev/ttyACM1` on Linux):
+```
+Finding all available ports for the MotorBus.
+['/dev/tty.usbmodem575E0032081', '/dev/tty.usbmodem575E0031751']
+Remove the usb cable from your DynamixelMotorsBus and press Enter when done.
+
+[...Disconnect follower arm and press Enter...]
+
+The port of this DynamixelMotorsBus is /dev/tty.usbmodem575E0032081
+Reconnect the usb cable.
+```
+
+Troubleshooting: On Linux, you might need to give access to the USB ports by running:
+```bash
+sudo chmod 666 /dev/ttyACM0
+sudo chmod 666 /dev/ttyACM1
+```
+
+**Configure your motors**
+Plug your first motor and run this script to set its ID to 1. It will also set its present position to 2048, so expect your motor to rotate:
+```bash
+python lerobot/scripts/configure_motor.py \
+ --port /dev/tty.usbmodem58760432961 \
+ --brand feetech \
+ --model sts3215 \
+ --baudrate 1000000 \
+ --ID 1
+```
+
+Note: These motors are currently limitated. They can take values between 0 and 4096 only, which corresponds to a full turn. They can't turn more than that. 2048 is at the middle of this range, so we can take -2048 steps (180 degrees anticlockwise) and reach the maximum range, or take +2048 steps (180 degrees clockwise) and reach the maximum range. The configuration step also sets the homing offset to 0, so that if you misassembled the arm, you can always update the homing offset to account for a shift up to ± 2048 steps (± 180 degrees).
+
+Then unplug your motor and plug the second motor and set its ID to 2.
+```bash
+python lerobot/scripts/configure_motor.py \
+ --port /dev/tty.usbmodem58760432961 \
+ --brand feetech \
+ --model sts3215 \
+ --baudrate 1000000 \
+ --ID 2
+```
+
+Redo the process for all your motors until ID 6. Do the same for the 6 motors of the leader arm.
+
+**Remove the gears of the 6 leader motors**
+Follow step 2 of the [assembly video](https://www.youtube.com/watch?v=DA91NJOtMic). You need to remove the gear for the motors of the leader arm. As a result, you will only use the position encoding of the motor and reduce friction to more easily operate the leader arm.
+
+**Add motor horn to the motors**
+Follow step 3 of the [assembly video](https://www.youtube.com/watch?v=DA91NJOtMic). For Moss v1, you need to align the holes on the motor horn to the motor spline to be approximately 3, 6, 9 and 12 o'clock.
+Try to avoid rotating the motor while doing so to keep position 2048 set during configuration. It is especially tricky for the leader motors as it is more sensible without the gears, but it's ok if it's a bit rotated.
+
+## Assemble the arms
+
+Follow step 4 of the [assembly video](https://www.youtube.com/watch?v=DA91NJOtMic). The first arm should take a bit more than 1 hour to assemble, but once you get use to it, you can do it under 1 hour for the second arm.
+
+## Calibrate
+
+Next, you'll need to calibrate your Moss v1 robot to ensure that the leader and follower arms have the same position values when they are in the same physical position. This calibration is essential because it allows a neural network trained on one Moss v1 robot to work on another.
+
+**Manual calibration of follower arm**
+/!\ Contrarily to step 6 of the [assembly video](https://www.youtube.com/watch?v=DA91NJOtMic) which illustrates the auto calibration, we will actually do manual calibration of follower for now.
+
+You will need to move the follower arm to these positions sequentially:
+
+| 1. Zero position | 2. Rotated position | 3. Rest position |
+|---|---|---|
+| | | |
+
+Make sure both arms are connected and run this script to launch manual calibration:
+```bash
+python lerobot/scripts/control_robot.py calibrate \
+ --robot-path lerobot/configs/robot/moss.yaml \
+ --robot-overrides '~cameras' --arms main_follower
+```
+
+**Manual calibration of leader arm**
+Follow step 6 of the [assembly video](https://www.youtube.com/watch?v=DA91NJOtMic) which illustrates the manual calibration. You will need to move the leader arm to these positions sequentially:
+
+| 1. Zero position | 2. Rotated position | 3. Rest position |
+|---|---|---|
+| | | |
+
+Run this script to launch manual calibration:
+```bash
+python lerobot/scripts/control_robot.py calibrate \
+ --robot-path lerobot/configs/robot/moss.yaml \
+ --robot-overrides '~cameras' --arms main_leader
+```
+
+## Teleoperate
+
+**Simple teleop**
+Then you are ready to teleoperate your robot! Run this simple script (it won't connect and display the cameras):
+```bash
+python lerobot/scripts/control_robot.py teleoperate \
+ --robot-path lerobot/configs/robot/moss.yaml \
+ --robot-overrides '~cameras' \
+ --display-cameras 0
+```
+
+
+**Teleop with displaying cameras**
+Follow [this guide to setup your cameras](https://github.com/huggingface/lerobot/blob/main/examples/7_get_started_with_real_robot.md#c-add-your-cameras-with-opencvcamera). Then you will be able to display the cameras on your computer while you are teleoperating by running the following code. This is useful to prepare your setup before recording your first dataset.
+```bash
+python lerobot/scripts/control_robot.py teleoperate \
+ --robot-path lerobot/configs/robot/moss.yaml
+```
+
+## Record a dataset
+
+Once you're familiar with teleoperation, you can record your first dataset with Moss v1.
+
+If you want to use the Hugging Face hub features for uploading your dataset and you haven't previously done it, make sure you've logged in using a write-access token, which can be generated from the [Hugging Face settings](https://huggingface.co/settings/tokens):
+```bash
+huggingface-cli login --token ${HUGGINGFACE_TOKEN} --add-to-git-credential
+```
+
+Store your Hugging Face repository name in a variable to run these commands:
+```bash
+HF_USER=$(huggingface-cli whoami | head -n 1)
+echo $HF_USER
+```
+
+Record 2 episodes and upload your dataset to the hub:
+```bash
+python lerobot/scripts/control_robot.py record \
+ --robot-path lerobot/configs/robot/moss.yaml \
+ --fps 30 \
+ --repo-id ${HF_USER}/moss_test \
+ --tags moss tutorial \
+ --warmup-time-s 5 \
+ --episode-time-s 40 \
+ --reset-time-s 10 \
+ --num-episodes 2 \
+ --push-to-hub 1
+```
+
+## Visualize a dataset
+
+If you uploaded your dataset to the hub with `--push-to-hub 1`, you can [visualize your dataset online](https://huggingface.co/spaces/lerobot/visualize_dataset) by copy pasting your repo id given by:
+```bash
+echo ${HF_USER}/moss_test
+```
+
+If you didn't upload with `--push-to-hub 0`, you can also visualize it locally with:
+```bash
+python lerobot/scripts/visualize_dataset_html.py \
+ --repo-id ${HF_USER}/moss_test
+```
+
+## Replay an episode
+
+Now try to replay the first episode on your robot:
+```bash
+python lerobot/scripts/control_robot.py replay \
+ --robot-path lerobot/configs/robot/moss.yaml \
+ --fps 30 \
+ --repo-id ${HF_USER}/moss_test \
+ --episode 0
+```
+
+## Train a policy
+
+To train a policy to control your robot, use the [`python lerobot/scripts/train.py`](../lerobot/scripts/train.py) script. A few arguments are required. Here is an example command:
+```bash
+python lerobot/scripts/train.py \
+ dataset_repo_id=${HF_USER}/moss_test \
+ policy=act_moss_real \
+ env=moss_real \
+ hydra.run.dir=outputs/train/act_moss_test \
+ hydra.job.name=act_moss_test \
+ device=cuda \
+ wandb.enable=true
+```
+
+Let's explain it:
+1. We provided the dataset as argument with `dataset_repo_id=${HF_USER}/moss_test`.
+2. We provided the policy with `policy=act_moss_real`. This loads configurations from [`lerobot/configs/policy/act_moss_real.yaml`](../lerobot/configs/policy/act_moss_real.yaml). Importantly, this policy uses 2 cameras as input `laptop`, `phone`.
+3. We provided an environment as argument with `env=moss_real`. This loads configurations from [`lerobot/configs/env/moss_real.yaml`](../lerobot/configs/env/moss_real.yaml).
+4. We provided `device=cuda` since we are training on a Nvidia GPU, but you can also use `device=mps` if you are using a Mac with Apple silicon, or `device=cpu` otherwise.
+5. We provided `wandb.enable=true` to use [Weights and Biases](https://docs.wandb.ai/quickstart) for visualizing training plots. This is optional but if you use it, make sure you are logged in by running `wandb login`.
+
+Training should take several hours. You will find checkpoints in `outputs/train/act_moss_test/checkpoints`.
+
+## Evaluate your policy
+
+You can use the `record` function from [`lerobot/scripts/control_robot.py`](../lerobot/scripts/control_robot.py) but with a policy checkpoint as input. For instance, run this command to record 10 evaluation episodes:
+```bash
+python lerobot/scripts/control_robot.py record \
+ --robot-path lerobot/configs/robot/moss.yaml \
+ --fps 30 \
+ --repo-id ${HF_USER}/eval_act_moss_test \
+ --tags moss tutorial eval \
+ --warmup-time-s 5 \
+ --episode-time-s 40 \
+ --reset-time-s 10 \
+ --num-episodes 10 \
+ -p outputs/train/act_moss_test/checkpoints/last/pretrained_model
+```
+
+As you can see, it's almost the same command as previously used to record your training dataset. Two things changed:
+1. There is an additional `-p` argument which indicates the path to your policy checkpoint with (e.g. `-p outputs/train/eval_moss_test/checkpoints/last/pretrained_model`). You can also use the model repository if you uploaded a model checkpoint to the hub (e.g. `-p ${HF_USER}/act_moss_test`).
+2. The name of dataset begins by `eval` to reflect that you are running inference (e.g. `--repo-id ${HF_USER}/eval_act_moss_test`).
+
+## More
+
+Follow this [previous tutorial](https://github.com/huggingface/lerobot/blob/main/examples/7_get_started_with_real_robot.md#4-train-a-policy-on-your-data) for a more in-depth tutorial on controlling real robots with LeRobot.
+
+If you have any question or need help, please reach out on Discord in the channel [`#moss-arm`](https://discord.com/channels/1216765309076115607/1275374638985252925).
diff --git a/examples/12_train_policy_accelerate.py b/examples/12_train_policy_accelerate.py
new file mode 100644
index 000000000..bc6b5b85d
--- /dev/null
+++ b/examples/12_train_policy_accelerate.py
@@ -0,0 +1,106 @@
+"""
+This script demonstrates how to train ACT policy with distributed training on the Aloha environment
+on the Transfer cube task, using HuggingFace accelerate.
+
+Apart from the main installation procedure, please also make sure you have installed accelerate before running this script: `pip install accelerate`.
+
+To launch it, you will have to use the accelerate launcher, for example:
+`python -m accelerate.commands.launch examples/8_train_policy_distributed.py`. This will launch the script with default distributed parameters.
+To launch on two GPUs, you can use `python -m accelerate.commands.launch --num_processes 2 lerobot/examples/7_train_policy_distributed.py`.
+
+Find detailed information in the documentation: `https://github.com/huggingface/accelerate`.
+"""
+
+from pathlib import Path
+
+import torch
+from accelerate import Accelerator
+
+from lerobot.common.datasets.lerobot_dataset import LeRobotDataset
+from lerobot.common.policies.act.configuration_act import ACTConfig
+from lerobot.common.policies.act.modeling_act import ACTPolicy
+
+# Create a directory to store the training checkpoint.
+output_directory = Path("outputs/train/example_aloha_act_distributed")
+output_directory.mkdir(parents=True, exist_ok=True)
+
+# Number of overall offline training steps
+training_steps = 5000
+log_freq = 250
+
+# The chunk size is the number of actions that the policy will predict.
+chunk_size = 100
+
+delta_timestamps = {
+ "action":
+ # Load the current action, the next 100 actions to be executed, because we train the policy
+ # to predict the next 100 actions. Two frames differ by 1/50 seconds, which corresponds to the FPS of this dataset.
+ [i / 50 for i in range(chunk_size)]
+}
+
+
+def train():
+ # We prepare for distributed training using the Accelerator.
+ accelerator = Accelerator()
+ device = accelerator.device
+
+ # Set up the dataset.
+ dataset = LeRobotDataset("lerobot/aloha_sim_transfer_cube_human_image", delta_timestamps=delta_timestamps)
+ accelerator.print(f"Loaded dataset with {len(dataset)} samples.")
+
+ # The policy is initialized with a configuration class, in this case `ACTConfig`.
+ cfg = ACTConfig(chunk_size=chunk_size)
+ policy = ACTPolicy(cfg, dataset_stats=dataset.stats)
+ policy.train()
+ num_total_params = sum(p.numel() for p in policy.parameters() if p.requires_grad)
+ accelerator.print(f"Policy initialized with {num_total_params} parameters.")
+
+ optimizer = torch.optim.Adam(policy.parameters(), lr=1e-5)
+
+ dataloader = torch.utils.data.DataLoader(
+ dataset,
+ num_workers=4,
+ batch_size=8,
+ shuffle=True,
+ pin_memory=device != torch.device("cpu"),
+ drop_last=True,
+ )
+
+ # Prepare the policy, optimizer, and dataloader for distributed training.
+ # This will wrap the policy in a DistributedDataParallel and apply torch.autocast to the forward functions.
+ policy, optimizer, dataloader = accelerator.prepare(policy, optimizer, dataloader)
+
+ policy.to(device)
+
+ step = 0
+ done = False
+ while not done:
+ for batch in dataloader:
+ batch = {k: v.to(device) for k, v in batch.items()}
+ output_dict = policy.forward(batch)
+
+ loss = output_dict["loss"].mean()
+ accelerator.backward(loss)
+
+ optimizer.step()
+ optimizer.zero_grad()
+
+ step += 1
+
+ if step % log_freq == 0:
+ accelerator.print(f"step: {step} loss: {loss.item():.3f}")
+
+ if step >= training_steps:
+ done = True
+ break
+
+ # Unwrap the policy of its distributed training wrapper and save it.
+ unwrapped_policy = accelerator.unwrap_model(policy)
+ unwrapped_policy.save_pretrained(output_directory)
+
+ accelerator.print("Finished offline training")
+
+
+# We need to add a call to the training function in order to be able to use the Accelerator.
+if __name__ == "__main__":
+ train()
diff --git a/examples/1_load_lerobot_dataset.py b/examples/1_load_lerobot_dataset.py
index 3846926a6..96c104b68 100644
--- a/examples/1_load_lerobot_dataset.py
+++ b/examples/1_load_lerobot_dataset.py
@@ -3,78 +3,120 @@
It illustrates how to load datasets, manipulate them, and apply transformations suitable for machine learning tasks in PyTorch.
Features included in this script:
-- Loading a dataset and accessing its properties.
-- Filtering data by episode number.
-- Converting tensor data for visualization.
-- Saving video files from dataset frames.
+- Viewing a dataset's metadata and exploring its properties.
+- Loading an existing dataset from the hub or a subset of it.
+- Accessing frames by episode number.
- Using advanced dataset features like timestamp-based frame selection.
- Demonstrating compatibility with PyTorch DataLoader for batch processing.
The script ends with examples of how to batch process data using PyTorch's DataLoader.
"""
-from pathlib import Path
from pprint import pprint
-import imageio
import torch
+from huggingface_hub import HfApi
import lerobot
-from lerobot.common.datasets.lerobot_dataset import LeRobotDataset
+from lerobot.common.datasets.lerobot_dataset import LeRobotDataset, LeRobotDatasetMetadata
+# We ported a number of existing datasets ourselves, use this to see the list:
print("List of available datasets:")
pprint(lerobot.available_datasets)
-# Let's take one for this example
-repo_id = "lerobot/pusht"
-
-# You can easily load a dataset from a Hugging Face repository
+# You can also browse through the datasets created/ported by the community on the hub using the hub api:
+hub_api = HfApi()
+repo_ids = [info.id for info in hub_api.list_datasets(task_categories="robotics", tags=["LeRobot"])]
+pprint(repo_ids)
+
+# Or simply explore them in your web browser directly at:
+# https://huggingface.co/datasets?other=LeRobot
+
+# Let's take this one for this example
+repo_id = "lerobot/aloha_mobile_cabinet"
+# We can have a look and fetch its metadata to know more about it:
+ds_meta = LeRobotDatasetMetadata(repo_id)
+
+# By instantiating just this class, you can quickly access useful information about the content and the
+# structure of the dataset without downloading the actual data yet (only metadata files — which are
+# lightweight).
+print(f"Total number of episodes: {ds_meta.total_episodes}")
+print(f"Average number of frames per episode: {ds_meta.total_frames / ds_meta.total_episodes:.3f}")
+print(f"Frames per second used during data collection: {ds_meta.fps}")
+print(f"Robot type: {ds_meta.robot_type}")
+print(f"keys to access images from cameras: {ds_meta.camera_keys=}\n")
+
+print("Tasks:")
+print(ds_meta.tasks)
+print("Features:")
+pprint(ds_meta.features)
+
+# You can also get a short summary by simply printing the object:
+print(ds_meta)
+
+# You can then load the actual dataset from the hub.
+# Either load any subset of episodes:
+dataset = LeRobotDataset(repo_id, episodes=[0, 10, 11, 23])
+
+# And see how many frames you have:
+print(f"Selected episodes: {dataset.episodes}")
+print(f"Number of episodes selected: {dataset.num_episodes}")
+print(f"Number of frames selected: {dataset.num_frames}")
+
+# Or simply load the entire dataset:
dataset = LeRobotDataset(repo_id)
+print(f"Number of episodes selected: {dataset.num_episodes}")
+print(f"Number of frames selected: {dataset.num_frames}")
-# LeRobotDataset is actually a thin wrapper around an underlying Hugging Face dataset
-# (see https://huggingface.co/docs/datasets/index for more information).
-print(dataset)
-print(dataset.hf_dataset)
+# The previous metadata class is contained in the 'meta' attribute of the dataset:
+print(dataset.meta)
-# And provides additional utilities for robotics and compatibility with Pytorch
-print(f"\naverage number of frames per episode: {dataset.num_samples / dataset.num_episodes:.3f}")
-print(f"frames per second used during data collection: {dataset.fps=}")
-print(f"keys to access images from cameras: {dataset.camera_keys=}\n")
+# LeRobotDataset actually wraps an underlying Hugging Face dataset
+# (see https://huggingface.co/docs/datasets for more information).
+print(dataset.hf_dataset)
-# Access frame indexes associated to first episode
+# LeRobot datasets also subclasses PyTorch datasets so you can do everything you know and love from working
+# with the latter, like iterating through the dataset.
+# The __getitem__ iterates over the frames of the dataset. Since our datasets are also structured by
+# episodes, you can access the frame indices of any episode using the episode_data_index. Here, we access
+# frame indices associated to the first episode:
episode_index = 0
from_idx = dataset.episode_data_index["from"][episode_index].item()
to_idx = dataset.episode_data_index["to"][episode_index].item()
-# LeRobot datasets actually subclass PyTorch datasets so you can do everything you know and love from working
-# with the latter, like iterating through the dataset. Here we grab all the image frames.
-frames = [dataset[idx]["observation.image"] for idx in range(from_idx, to_idx)]
+# Then we grab all the image frames from the first camera:
+camera_key = dataset.meta.camera_keys[0]
+frames = [dataset[idx][camera_key] for idx in range(from_idx, to_idx)]
-# Video frames are now float32 in range [0,1] channel first (c,h,w) to follow pytorch convention. To visualize
-# them, we convert to uint8 in range [0,255]
-frames = [(frame * 255).type(torch.uint8) for frame in frames]
-# and to channel last (h,w,c).
-frames = [frame.permute((1, 2, 0)).numpy() for frame in frames]
+# The objects returned by the dataset are all torch.Tensors
+print(type(frames[0]))
+print(frames[0].shape)
-# Finally, we save the frames to a mp4 video for visualization.
-Path("outputs/examples/1_load_lerobot_dataset").mkdir(parents=True, exist_ok=True)
-imageio.mimsave("outputs/examples/1_load_lerobot_dataset/episode_0.mp4", frames, fps=dataset.fps)
+# Since we're using pytorch, the shape is in pytorch, channel-first convention (c, h, w).
+# We can compare this shape with the information available for that feature
+pprint(dataset.features[camera_key])
+# In particular:
+print(dataset.features[camera_key]["shape"])
+# The shape is in (h, w, c) which is a more universal format.
# For many machine learning applications we need to load the history of past observations or trajectories of
# future actions. Our datasets can load previous and future frames for each key/modality, using timestamps
# differences with the current loaded frame. For instance:
delta_timestamps = {
# loads 4 images: 1 second before current frame, 500 ms before, 200 ms before, and current frame
- "observation.image": [-1, -0.5, -0.20, 0],
- # loads 8 state vectors: 1.5 seconds before, 1 second before, ... 20 ms, 10 ms, and current frame
- "observation.state": [-1.5, -1, -0.5, -0.20, -0.10, -0.02, -0.01, 0],
+ camera_key: [-1, -0.5, -0.20, 0],
+ # loads 8 state vectors: 1.5 seconds before, 1 second before, ... 200 ms, 100 ms, and current frame
+ "observation.state": [-1.5, -1, -0.5, -0.20, -0.10, 0],
# loads 64 action vectors: current frame, 1 frame in the future, 2 frames, ... 63 frames in the future
"action": [t / dataset.fps for t in range(64)],
}
+# Note that in any case, these delta_timestamps values need to be multiples of (1/fps) so that added to any
+# timestamp, you still get a valid timestamp.
+
dataset = LeRobotDataset(repo_id, delta_timestamps=delta_timestamps)
-print(f"\n{dataset[0]['observation.image'].shape=}") # (4,c,h,w)
-print(f"{dataset[0]['observation.state'].shape=}") # (8,c)
-print(f"{dataset[0]['action'].shape=}\n") # (64,c)
+print(f"\n{dataset[0][camera_key].shape=}") # (4, c, h, w)
+print(f"{dataset[0]['observation.state'].shape=}") # (6, c)
+print(f"{dataset[0]['action'].shape=}\n") # (64, c)
# Finally, our datasets are fully compatible with PyTorch dataloaders and samplers because they are just
# PyTorch datasets.
@@ -84,8 +126,9 @@
batch_size=32,
shuffle=True,
)
+
for batch in dataloader:
- print(f"{batch['observation.image'].shape=}") # (32,4,c,h,w)
- print(f"{batch['observation.state'].shape=}") # (32,8,c)
- print(f"{batch['action'].shape=}") # (32,64,c)
+ print(f"{batch[camera_key].shape=}") # (32, 4, c, h, w)
+ print(f"{batch['observation.state'].shape=}") # (32, 5, c)
+ print(f"{batch['action'].shape=}") # (32, 64, c)
break
diff --git a/examples/3_train_policy.py b/examples/3_train_policy.py
index c5ce0d184..935ab2dbf 100644
--- a/examples/3_train_policy.py
+++ b/examples/3_train_policy.py
@@ -40,7 +40,7 @@
# For this example, no arguments need to be passed because the defaults are set up for PushT.
# If you're doing something different, you will likely need to change at least some of the defaults.
cfg = DiffusionConfig()
-policy = DiffusionPolicy(cfg, dataset_stats=dataset.stats)
+policy = DiffusionPolicy(cfg, dataset_stats=dataset.meta.stats)
policy.train()
policy.to(device)
diff --git a/examples/6_add_image_transforms.py b/examples/6_add_image_transforms.py
index bdcc6d7b9..82b70f5c1 100644
--- a/examples/6_add_image_transforms.py
+++ b/examples/6_add_image_transforms.py
@@ -1,7 +1,7 @@
"""
This script demonstrates how to use torchvision's image transformation with LeRobotDataset for data
augmentation purposes. The transformations are passed to the dataset as an argument upon creation, and
-transforms are applied to the observation images before they are returned in the dataset's __get_item__.
+transforms are applied to the observation images before they are returned in the dataset's __getitem__.
"""
from pathlib import Path
@@ -20,7 +20,7 @@
first_idx = dataset.episode_data_index["from"][0].item()
# Get the frame corresponding to the first camera
-frame = dataset[first_idx][dataset.camera_keys[0]]
+frame = dataset[first_idx][dataset.meta.camera_keys[0]]
# Define the transformations
@@ -36,7 +36,7 @@
transformed_dataset = LeRobotDataset(dataset_repo_id, image_transforms=transforms)
# Get a frame from the transformed dataset
-transformed_frame = transformed_dataset[first_idx][transformed_dataset.camera_keys[0]]
+transformed_frame = transformed_dataset[first_idx][transformed_dataset.meta.camera_keys[0]]
# Create a directory to store output images
output_dir = Path("outputs/image_transforms")
diff --git a/examples/7_get_started_with_real_robot.md b/examples/7_get_started_with_real_robot.md
index 50a2c6452..76408275d 100644
--- a/examples/7_get_started_with_real_robot.md
+++ b/examples/7_get_started_with_real_robot.md
@@ -11,7 +11,7 @@ This tutorial will guide you through the process of setting up and training a ne
By following these steps, you'll be able to replicate tasks like picking up a Lego block and placing it in a bin with a high success rate, as demonstrated in [this video](https://x.com/RemiCadene/status/1814680760592572934).
-This tutorial is specifically made for the affordable [Koch v1.1](https://github.com/jess-moss/koch-v1-1) robot, but it contains additional information to be easily adapted to various types of robots like [Aloha bimanual robot](aloha-2.github.io) by changing some configurations. The Koch v1.1 consists of a leader arm and a follower arm, each with 6 motors. It can work with one or several cameras to record the scene, which serve as visual sensors for the robot.
+This tutorial is specifically made for the affordable [Koch v1.1](https://github.com/jess-moss/koch-v1-1) robot, but it contains additional information to be easily adapted to various types of robots like [Aloha bimanual robot](https://aloha-2.github.io) by changing some configurations. The Koch v1.1 consists of a leader arm and a follower arm, each with 6 motors. It can work with one or several cameras to record the scene, which serve as visual sensors for the robot.
During the data collection phase, you will control the follower arm by moving the leader arm. This process is known as "teleoperation." This technique is used to collect robot trajectories. Afterward, you'll train a neural network to imitate these trajectories and deploy the network to enable your robot to operate autonomously.
@@ -45,7 +45,7 @@ poetry install --sync --extras "dynamixel"
```bash
conda install -c conda-forge ffmpeg
pip uninstall opencv-python
-conda install -c conda-forge opencv>=4.10.0
+conda install -c conda-forge "opencv>=4.10.0"
```
You are now ready to plug the 5V power supply to the motor bus of the leader arm (the smaller one) since all its motors only require 5V.
@@ -78,12 +78,12 @@ To begin, create two instances of the [`DynamixelMotorsBus`](../lerobot/common/
To find the correct ports for each arm, run the utility script twice:
```bash
-python lerobot/common/robot_devices/motors/dynamixel.py
+python lerobot/scripts/find_motors_bus_port.py
```
Example output when identifying the leader arm's port (e.g., `/dev/tty.usbmodem575E0031751` on Mac, or possibly `/dev/ttyACM0` on Linux):
```
-Finding all available ports for the DynamixelMotorsBus.
+Finding all available ports for the MotorBus.
['/dev/tty.usbmodem575E0032081', '/dev/tty.usbmodem575E0031751']
Remove the usb cable from your DynamixelMotorsBus and press Enter when done.
@@ -95,7 +95,7 @@ Reconnect the usb cable.
Example output when identifying the follower arm's port (e.g., `/dev/tty.usbmodem575E0032081`, or possibly `/dev/ttyACM1` on Linux):
```
-Finding all available ports for the DynamixelMotorsBus.
+Finding all available ports for the MotorBus.
['/dev/tty.usbmodem575E0032081', '/dev/tty.usbmodem575E0031751']
Remove the usb cable from your DynamixelMotorsBus and press Enter when done.
@@ -778,7 +778,6 @@ Now run this to record 2 episodes:
python lerobot/scripts/control_robot.py record \
--robot-path lerobot/configs/robot/koch.yaml \
--fps 30 \
- --root data \
--repo-id ${HF_USER}/koch_test \
--tags tutorial \
--warmup-time-s 5 \
@@ -787,7 +786,7 @@ python lerobot/scripts/control_robot.py record \
--num-episodes 2
```
-This will write your dataset locally to `{root}/{repo-id}` (e.g. `data/cadene/koch_test`) and push it on the hub at `https://huggingface.co/datasets/{HF_USER}/{repo-id}`. Your dataset will be automatically tagged with `LeRobot` for the community to find it easily, and you can also add custom tags (in this case `tutorial` for example).
+This will write your dataset locally to `~/.cache/huggingface/lerobot/{repo-id}` (e.g. `data/cadene/koch_test`) and push it on the hub at `https://huggingface.co/datasets/{HF_USER}/{repo-id}`. Your dataset will be automatically tagged with `LeRobot` for the community to find it easily, and you can also add custom tags (in this case `tutorial` for example).
You can look for other LeRobot datasets on the hub by searching for `LeRobot` tags: https://huggingface.co/datasets?other=LeRobot
@@ -840,7 +839,6 @@ In the coming months, we plan to release a foundational model for robotics. We a
You can visualize your dataset by running:
```bash
python lerobot/scripts/visualize_dataset_html.py \
- --root data \
--repo-id ${HF_USER}/koch_test
```
@@ -858,7 +856,6 @@ To replay the first episode of the dataset you just recorded, run the following
python lerobot/scripts/control_robot.py replay \
--robot-path lerobot/configs/robot/koch.yaml \
--fps 30 \
- --root data \
--repo-id ${HF_USER}/koch_test \
--episode 0
```
@@ -871,7 +868,7 @@ Your robot should replicate movements similar to those you recorded. For example
To train a policy to control your robot, use the [`python lerobot/scripts/train.py`](../lerobot/scripts/train.py) script. A few arguments are required. Here is an example command:
```bash
-DATA_DIR=data python lerobot/scripts/train.py \
+python lerobot/scripts/train.py \
dataset_repo_id=${HF_USER}/koch_test \
policy=act_koch_real \
env=koch_real \
@@ -918,7 +915,6 @@ env:
It should match your dataset (e.g. `fps: 30`) and your robot (e.g. `state_dim: 6` and `action_dim: 6`). We are still working on simplifying this in future versions of `lerobot`.
4. We provided `device=cuda` since we are training on a Nvidia GPU, but you could use `device=mps` to train on Apple silicon.
5. We provided `wandb.enable=true` to use [Weights and Biases](https://docs.wandb.ai/quickstart) for visualizing training plots. This is optional but if you use it, make sure you are logged in by running `wandb login`.
-6. We added `DATA_DIR=data` to access your dataset stored in your local `data` directory. If you dont provide `DATA_DIR`, your dataset will be downloaded from Hugging Face hub to your cache folder `$HOME/.cache/hugginface`. In future versions of `lerobot`, both directories will be in sync.
For more information on the `train` script see the previous tutorial: [`examples/4_train_policy_with_script.md`](../examples/4_train_policy_with_script.md)
@@ -991,7 +987,6 @@ To this end, you can use the `record` function from [`lerobot/scripts/control_ro
python lerobot/scripts/control_robot.py record \
--robot-path lerobot/configs/robot/koch.yaml \
--fps 30 \
- --root data \
--repo-id ${HF_USER}/eval_koch_test \
--tags tutorial eval \
--warmup-time-s 5 \
@@ -1010,7 +1005,6 @@ As you can see, it's almost the same command as previously used to record your t
You can then visualize your evaluation dataset by running the same command as before but with the new inference dataset as argument:
```bash
python lerobot/scripts/visualize_dataset.py \
- --root data \
--repo-id ${HF_USER}/eval_koch_test
```
diff --git a/examples/8_use_stretch.md b/examples/8_use_stretch.md
new file mode 100644
index 000000000..c2c306f07
--- /dev/null
+++ b/examples/8_use_stretch.md
@@ -0,0 +1,156 @@
+This tutorial explains how to use [Stretch 3](https://hello-robot.com/stretch-3-product) with LeRobot.
+
+## Setup
+
+Familiarize yourself with Stretch by following its [tutorials](https://docs.hello-robot.com/0.3/getting_started/hello_robot/) (recommended).
+
+To use LeRobot on Stretch, 3 options are available:
+- [tethered setup](https://docs.hello-robot.com/0.3/getting_started/connecting_to_stretch/#tethered-setup)
+- [untethered setup](https://docs.hello-robot.com/0.3/getting_started/connecting_to_stretch/#untethered-setup)
+- ssh directly into Stretch (you will first need to install and configure openssh-server on stretch using one of the two above setups)
+
+
+## Install LeRobot
+
+On Stretch's CLI, follow these steps:
+
+1. [Install Miniconda](https://docs.anaconda.com/miniconda/#quick-command-line-install):
+```bash
+mkdir -p ~/miniconda3
+wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda3/miniconda.sh
+bash ~/miniconda3/miniconda.sh -b -u -p ~/miniconda3
+rm ~/miniconda3/miniconda.sh
+~/miniconda3/bin/conda init bash
+```
+
+2. Comment out these lines in `~/.profile` (this can mess up paths used by conda and ~/.local/bin should already be in your PATH)
+```
+# set PATH so it includes user's private bin if it exists
+if [ -d "$HOME/.local/bin" ] ; then
+ PATH="$HOME/.local/bin:$PATH"
+fi
+```
+
+3. Restart shell or `source ~/.bashrc`
+
+4. Create and activate a fresh conda environment for lerobot
+```bash
+conda create -y -n lerobot python=3.10 && conda activate lerobot
+```
+
+5. Clone LeRobot:
+```bash
+git clone https://github.com/huggingface/lerobot.git ~/lerobot
+```
+
+6. Install LeRobot with stretch dependencies:
+```bash
+cd ~/lerobot && pip install -e ".[stretch]"
+```
+
+> **Note:** If you get this message, you can ignore it: `ERROR: pip's dependency resolver does not currently take into account all the packages that are installed.`
+
+For Linux only (not Mac), install extra dependencies for recording datasets:
+```bash
+conda install -y -c conda-forge ffmpeg
+pip uninstall -y opencv-python
+conda install -y -c conda-forge "opencv>=4.10.0"
+```
+
+7. Run a [system check](https://docs.hello-robot.com/0.3/getting_started/stretch_hardware_overview/#system-check) to make sure your robot is ready:
+```bash
+stretch_system_check.py
+```
+
+> **Note:** You may need to free the "robot process" after booting Stretch by running `stretch_free_robot_process.py`. For more info this Stretch's [doc](https://docs.hello-robot.com/0.3/getting_started/stretch_hardware_overview/#turning-off-gamepad-teleoperation).
+
+You should get something like this:
+```bash
+For use with S T R E T C H (R) from Hello Robot Inc.
+---------------------------------------------------------------------
+
+Model = Stretch 3
+Tool = DexWrist 3 w/ Gripper
+Serial Number = stretch-se3-3054
+
+---- Checking Hardware ----
+[Pass] Comms are ready
+[Pass] Actuators are ready
+[Warn] Sensors not ready (IMU AZ = -10.19 out of range -10.1 to -9.5)
+[Pass] Battery voltage is 13.6 V
+
+---- Checking Software ----
+[Pass] Ubuntu 22.04 is ready
+[Pass] All APT pkgs are setup correctly
+[Pass] Firmware is up-to-date
+[Pass] Python pkgs are up-to-date
+[Pass] ROS2 Humble is ready
+```
+
+## Teleoperate, record a dataset and run a policy
+
+**Calibrate (Optional)**
+Before operating Stretch, you need to [home](https://docs.hello-robot.com/0.3/getting_started/stretch_hardware_overview/#homing) it first. Be mindful about giving Stretch some space as this procedure will move the robot's arm and gripper. Now run this command:
+```bash
+python lerobot/scripts/control_robot.py calibrate \
+ --robot-path lerobot/configs/robot/stretch.yaml
+```
+This is equivalent to running `stretch_robot_home.py`
+
+> **Note:** If you run any of the LeRobot scripts below and Stretch is not poperly homed, it will automatically home/calibrate first.
+
+**Teleoperate**
+Before trying teleoperation, you need activate the gamepad controller by pressing the middle button. For more info, see Stretch's [doc](https://docs.hello-robot.com/0.3/getting_started/hello_robot/#gamepad-teleoperation).
+
+Now try out teleoperation (see above documentation to learn about the gamepad controls):
+```bash
+python lerobot/scripts/control_robot.py teleoperate \
+ --robot-path lerobot/configs/robot/stretch.yaml
+```
+This is essentially the same as running `stretch_gamepad_teleop.py`
+
+**Record a dataset**
+Once you're familiar with the gamepad controls and after a bit of practice, you can try to record your first dataset with Stretch.
+
+If you want to use the Hugging Face hub features for uploading your dataset and you haven't previously done it, make sure you've logged in using a write-access token, which can be generated from the [Hugging Face settings](https://huggingface.co/settings/tokens):
+```bash
+huggingface-cli login --token ${HUGGINGFACE_TOKEN} --add-to-git-credential
+```
+
+Store your Hugging Face repository name in a variable to run these commands:
+```bash
+HF_USER=$(huggingface-cli whoami | head -n 1)
+echo $HF_USER
+```
+
+Record one episode:
+```bash
+python lerobot/scripts/control_robot.py record \
+ --robot-path lerobot/configs/robot/stretch.yaml \
+ --fps 20 \
+ --repo-id ${HF_USER}/stretch_test \
+ --tags stretch tutorial \
+ --warmup-time-s 3 \
+ --episode-time-s 40 \
+ --reset-time-s 10 \
+ --num-episodes 1 \
+ --push-to-hub 0
+```
+
+> **Note:** If you're using ssh to connect to Stretch and run this script, you won't be able to visualize its cameras feed (though they will still be recording). To see the cameras stream, use [tethered](https://docs.hello-robot.com/0.3/getting_started/connecting_to_stretch/#tethered-setup) or [untethered setup](https://docs.hello-robot.com/0.3/getting_started/connecting_to_stretch/#untethered-setup).
+
+**Replay an episode**
+Now try to replay this episode (make sure the robot's initial position is the same):
+```bash
+python lerobot/scripts/control_robot.py replay \
+ --robot-path lerobot/configs/robot/stretch.yaml \
+ --fps 20 \
+ --repo-id ${HF_USER}/stretch_test \
+ --episode 0
+```
+
+Follow [previous tutorial](https://github.com/huggingface/lerobot/blob/main/examples/7_get_started_with_real_robot.md#4-train-a-policy-on-your-data) to train a policy on your data and run inference on your robot. You will need to adapt the code for Stretch.
+
+> TODO(rcadene, aliberts): Add already setup environment and policy yaml configuration files
+
+If you need help, please reach out on Discord in the channel `#stretch3-mobile-arm`.
diff --git a/examples/9_use_aloha.md b/examples/9_use_aloha.md
new file mode 100644
index 000000000..1abf7c495
--- /dev/null
+++ b/examples/9_use_aloha.md
@@ -0,0 +1,174 @@
+This tutorial explains how to use [Aloha and Aloha 2 stationary](https://www.trossenrobotics.com/aloha-stationary) with LeRobot.
+
+## Setup
+
+Follow the [documentation from Trossen Robotics](https://docs.trossenrobotics.com/aloha_docs/getting_started/stationary/hardware_setup.html) for setting up the hardware and plugging the 4 arms and 4 cameras to your computer.
+
+
+## Install LeRobot
+
+On your computer:
+
+1. [Install Miniconda](https://docs.anaconda.com/miniconda/#quick-command-line-install):
+```bash
+mkdir -p ~/miniconda3
+wget https://repo.anaconda.com/miniconda/Miniconda3-latest-Linux-x86_64.sh -O ~/miniconda3/miniconda.sh
+bash ~/miniconda3/miniconda.sh -b -u -p ~/miniconda3
+rm ~/miniconda3/miniconda.sh
+~/miniconda3/bin/conda init bash
+```
+
+2. Restart shell or `source ~/.bashrc`
+
+3. Create and activate a fresh conda environment for lerobot
+```bash
+conda create -y -n lerobot python=3.10 && conda activate lerobot
+```
+
+4. Clone LeRobot:
+```bash
+git clone https://github.com/huggingface/lerobot.git ~/lerobot
+```
+
+5. Install LeRobot with dependencies for the Aloha motors (dynamixel) and cameras (intelrealsense):
+```bash
+cd ~/lerobot && pip install -e ".[dynamixel, intelrealsense]"
+```
+
+For Linux only (not Mac), install extra dependencies for recording datasets:
+```bash
+conda install -y -c conda-forge ffmpeg
+pip uninstall -y opencv-python
+conda install -y -c conda-forge "opencv>=4.10.0"
+```
+
+## Teleoperate
+
+**/!\ FOR SAFETY, READ THIS /!\**
+Teleoperation consists in manually operating the leader arms to move the follower arms. Importantly:
+1. Make sure your leader arms are in the same position as the follower arms, so that the follower arms don't move too fast to match the leader arms,
+2. Our code assumes that your robot has been assembled following Trossen Robotics instructions. This allows us to skip calibration, as we use the pre-defined calibration files in `.cache/calibration/aloha_default`. If you replace a motor, make sure you follow the exact instructions from Trossen Robotics.
+
+By running the following code, you can start your first **SAFE** teleoperation:
+```bash
+python lerobot/scripts/control_robot.py teleoperate \
+ --robot-path lerobot/configs/robot/aloha.yaml \
+ --robot-overrides max_relative_target=5
+```
+
+By adding `--robot-overrides max_relative_target=5`, we override the default value for `max_relative_target` defined in `lerobot/configs/robot/aloha.yaml`. It is expected to be `5` to limit the magnitude of the movement for more safety, but the teloperation won't be smooth. When you feel confident, you can disable this limit by adding `--robot-overrides max_relative_target=null` to the command line:
+```bash
+python lerobot/scripts/control_robot.py teleoperate \
+ --robot-path lerobot/configs/robot/aloha.yaml \
+ --robot-overrides max_relative_target=null
+```
+
+## Record a dataset
+
+Once you're familiar with teleoperation, you can record your first dataset with Aloha.
+
+If you want to use the Hugging Face hub features for uploading your dataset and you haven't previously done it, make sure you've logged in using a write-access token, which can be generated from the [Hugging Face settings](https://huggingface.co/settings/tokens):
+```bash
+huggingface-cli login --token ${HUGGINGFACE_TOKEN} --add-to-git-credential
+```
+
+Store your Hugging Face repository name in a variable to run these commands:
+```bash
+HF_USER=$(huggingface-cli whoami | head -n 1)
+echo $HF_USER
+```
+
+Record 2 episodes and upload your dataset to the hub:
+```bash
+python lerobot/scripts/control_robot.py record \
+ --robot-path lerobot/configs/robot/aloha.yaml \
+ --robot-overrides max_relative_target=null \
+ --fps 30 \
+ --repo-id ${HF_USER}/aloha_test \
+ --tags aloha tutorial \
+ --warmup-time-s 5 \
+ --episode-time-s 40 \
+ --reset-time-s 10 \
+ --num-episodes 2 \
+ --push-to-hub 1
+```
+
+## Visualize a dataset
+
+If you uploaded your dataset to the hub with `--push-to-hub 1`, you can [visualize your dataset online](https://huggingface.co/spaces/lerobot/visualize_dataset) by copy pasting your repo id given by:
+```bash
+echo ${HF_USER}/aloha_test
+```
+
+If you didn't upload with `--push-to-hub 0`, you can also visualize it locally with:
+```bash
+python lerobot/scripts/visualize_dataset_html.py \
+ --repo-id ${HF_USER}/aloha_test
+```
+
+## Replay an episode
+
+**/!\ FOR SAFETY, READ THIS /!\**
+Replay consists in automatically replaying the sequence of actions (i.e. goal positions for your motors) recorded in a given dataset episode. Make sure the current initial position of your robot is similar to the one in your episode, so that your follower arms don't move too fast to go to the first goal positions. For safety, you might want to add `--robot-overrides max_relative_target=5` to your command line as explained above.
+
+Now try to replay the first episode on your robot:
+```bash
+python lerobot/scripts/control_robot.py replay \
+ --robot-path lerobot/configs/robot/aloha.yaml \
+ --robot-overrides max_relative_target=null \
+ --fps 30 \
+ --repo-id ${HF_USER}/aloha_test \
+ --episode 0
+```
+
+## Train a policy
+
+To train a policy to control your robot, use the [`python lerobot/scripts/train.py`](../lerobot/scripts/train.py) script. A few arguments are required. Here is an example command:
+```bash
+python lerobot/scripts/train.py \
+ dataset_repo_id=${HF_USER}/aloha_test \
+ policy=act_aloha_real \
+ env=aloha_real \
+ hydra.run.dir=outputs/train/act_aloha_test \
+ hydra.job.name=act_aloha_test \
+ device=cuda \
+ wandb.enable=true
+```
+
+Let's explain it:
+1. We provided the dataset as argument with `dataset_repo_id=${HF_USER}/aloha_test`.
+2. We provided the policy with `policy=act_aloha_real`. This loads configurations from [`lerobot/configs/policy/act_aloha_real.yaml`](../lerobot/configs/policy/act_aloha_real.yaml). Importantly, this policy uses 4 cameras as input `cam_right_wrist`, `cam_left_wrist`, `cam_high`, and `cam_low`.
+3. We provided an environment as argument with `env=aloha_real`. This loads configurations from [`lerobot/configs/env/aloha_real.yaml`](../lerobot/configs/env/aloha_real.yaml). Note: this yaml defines 18 dimensions for the `state_dim` and `action_dim`, corresponding to 18 motors, not 14 motors as used in previous Aloha work. This is because, we include the `shoulder_shadow` and `elbow_shadow` motors for simplicity.
+4. We provided `device=cuda` since we are training on a Nvidia GPU.
+5. We provided `wandb.enable=true` to use [Weights and Biases](https://docs.wandb.ai/quickstart) for visualizing training plots. This is optional but if you use it, make sure you are logged in by running `wandb login`.
+
+Training should take several hours. You will find checkpoints in `outputs/train/act_aloha_test/checkpoints`.
+
+## Evaluate your policy
+
+You can use the `record` function from [`lerobot/scripts/control_robot.py`](../lerobot/scripts/control_robot.py) but with a policy checkpoint as input. For instance, run this command to record 10 evaluation episodes:
+```bash
+python lerobot/scripts/control_robot.py record \
+ --robot-path lerobot/configs/robot/aloha.yaml \
+ --robot-overrides max_relative_target=null \
+ --fps 30 \
+ --repo-id ${HF_USER}/eval_act_aloha_test \
+ --tags aloha tutorial eval \
+ --warmup-time-s 5 \
+ --episode-time-s 40 \
+ --reset-time-s 10 \
+ --num-episodes 10 \
+ --num-image-writer-processes 1 \
+ -p outputs/train/act_aloha_test/checkpoints/last/pretrained_model
+```
+
+As you can see, it's almost the same command as previously used to record your training dataset. Two things changed:
+1. There is an additional `-p` argument which indicates the path to your policy checkpoint with (e.g. `-p outputs/train/eval_aloha_test/checkpoints/last/pretrained_model`). You can also use the model repository if you uploaded a model checkpoint to the hub (e.g. `-p ${HF_USER}/act_aloha_test`).
+2. The name of dataset begins by `eval` to reflect that you are running inference (e.g. `--repo-id ${HF_USER}/eval_act_aloha_test`).
+3. We use `--num-image-writer-processes 1` instead of the default value (`0`). On our computer, using a dedicated process to write images from the 4 cameras on disk allows to reach constent 30 fps during inference. Feel free to explore different values for `--num-image-writer-processes`.
+
+## More
+
+Follow this [previous tutorial](https://github.com/huggingface/lerobot/blob/main/examples/7_get_started_with_real_robot.md#4-train-a-policy-on-your-data) for a more in-depth explaination.
+
+If you have any question or need help, please reach out on Discord in the channel `#aloha-arm`.
diff --git a/examples/advanced/2_calculate_validation_loss.py b/examples/advanced/2_calculate_validation_loss.py
index 1428014b6..00ba9930f 100644
--- a/examples/advanced/2_calculate_validation_loss.py
+++ b/examples/advanced/2_calculate_validation_loss.py
@@ -14,7 +14,7 @@
import torch
from huggingface_hub import snapshot_download
-from lerobot.common.datasets.lerobot_dataset import LeRobotDataset
+from lerobot.common.datasets.lerobot_dataset import LeRobotDataset, LeRobotDatasetMetadata
from lerobot.common.policies.diffusion.modeling_diffusion import DiffusionPolicy
device = torch.device("cuda")
@@ -41,26 +41,20 @@
}
# Load the last 10% of episodes of the dataset as a validation set.
-# - Load full dataset
-full_dataset = LeRobotDataset("lerobot/pusht", split="train")
-# - Calculate train and val subsets
-num_train_episodes = math.floor(full_dataset.num_episodes * 90 / 100)
-num_val_episodes = full_dataset.num_episodes - num_train_episodes
-print(f"Number of episodes in full dataset: {full_dataset.num_episodes}")
-print(f"Number of episodes in training dataset (90% subset): {num_train_episodes}")
-print(f"Number of episodes in validation dataset (10% subset): {num_val_episodes}")
-# - Get first frame index of the validation set
-first_val_frame_index = full_dataset.episode_data_index["from"][num_train_episodes].item()
-# - Load frames subset belonging to validation set using the `split` argument.
-# It utilizes the `datasets` library's syntax for slicing datasets.
-# For more information on the Slice API, please see:
-# https://huggingface.co/docs/datasets/v2.19.0/loading#slice-splits
-train_dataset = LeRobotDataset(
- "lerobot/pusht", split=f"train[:{first_val_frame_index}]", delta_timestamps=delta_timestamps
-)
-val_dataset = LeRobotDataset(
- "lerobot/pusht", split=f"train[{first_val_frame_index}:]", delta_timestamps=delta_timestamps
-)
+# - Load dataset metadata
+dataset_metadata = LeRobotDatasetMetadata("lerobot/pusht")
+# - Calculate train and val episodes
+total_episodes = dataset_metadata.total_episodes
+episodes = list(range(dataset_metadata.total_episodes))
+num_train_episodes = math.floor(total_episodes * 90 / 100)
+train_episodes = episodes[:num_train_episodes]
+val_episodes = episodes[num_train_episodes:]
+print(f"Number of episodes in full dataset: {total_episodes}")
+print(f"Number of episodes in training dataset (90% subset): {len(train_episodes)}")
+print(f"Number of episodes in validation dataset (10% subset): {len(val_episodes)}")
+# - Load train an val datasets
+train_dataset = LeRobotDataset("lerobot/pusht", episodes=train_episodes, delta_timestamps=delta_timestamps)
+val_dataset = LeRobotDataset("lerobot/pusht", episodes=val_episodes, delta_timestamps=delta_timestamps)
print(f"Number of frames in training dataset (90% subset): {len(train_dataset)}")
print(f"Number of frames in validation dataset (10% subset): {len(val_dataset)}")
diff --git a/examples/port_datasets/pusht_zarr.py b/examples/port_datasets/pusht_zarr.py
new file mode 100644
index 000000000..60df98405
--- /dev/null
+++ b/examples/port_datasets/pusht_zarr.py
@@ -0,0 +1,222 @@
+import shutil
+from pathlib import Path
+
+import numpy as np
+import torch
+
+from lerobot.common.datasets.lerobot_dataset import LEROBOT_HOME, LeRobotDataset
+from lerobot.common.datasets.push_dataset_to_hub._download_raw import download_raw
+
+PUSHT_TASK = "Push the T-shaped blue block onto the T-shaped green target surface."
+PUSHT_FEATURES = {
+ "observation.state": {
+ "dtype": "float32",
+ "shape": (2,),
+ "names": {
+ "axes": ["x", "y"],
+ },
+ },
+ "action": {
+ "dtype": "float32",
+ "shape": (2,),
+ "names": {
+ "axes": ["x", "y"],
+ },
+ },
+ "next.reward": {
+ "dtype": "float32",
+ "shape": (1,),
+ "names": None,
+ },
+ "next.success": {
+ "dtype": "bool",
+ "shape": (1,),
+ "names": None,
+ },
+ "observation.environment_state": {
+ "dtype": "float32",
+ "shape": (16,),
+ "names": [
+ "keypoints",
+ ],
+ },
+ "observation.image": {
+ "dtype": None,
+ "shape": (3, 96, 96),
+ "names": [
+ "channel",
+ "height",
+ "width",
+ ],
+ },
+}
+
+
+def build_features(mode: str) -> dict:
+ features = PUSHT_FEATURES
+ if mode == "keypoints":
+ features.pop("observation.image")
+ else:
+ features.pop("observation.environment_state")
+ features["observation.image"]["dtype"] = mode
+
+ return features
+
+
+def load_raw_dataset(zarr_path: Path):
+ try:
+ from lerobot.common.datasets.push_dataset_to_hub._diffusion_policy_replay_buffer import (
+ ReplayBuffer as DiffusionPolicyReplayBuffer,
+ )
+ except ModuleNotFoundError as e:
+ print("`gym_pusht` is not installed. Please install it with `pip install 'lerobot[gym_pusht]'`")
+ raise e
+
+ zarr_data = DiffusionPolicyReplayBuffer.copy_from_path(zarr_path)
+ return zarr_data
+
+
+def calculate_coverage(zarr_data):
+ try:
+ import pymunk
+ from gym_pusht.envs.pusht import PushTEnv, pymunk_to_shapely
+ except ModuleNotFoundError as e:
+ print("`gym_pusht` is not installed. Please install it with `pip install 'lerobot[gym_pusht]'`")
+ raise e
+
+ block_pos = zarr_data["state"][:, 2:4]
+ block_angle = zarr_data["state"][:, 4]
+
+ num_frames = len(block_pos)
+
+ coverage = np.zeros((num_frames,))
+ # 8 keypoints with 2 coords each
+ keypoints = np.zeros((num_frames, 16))
+
+ # Set x, y, theta (in radians)
+ goal_pos_angle = np.array([256, 256, np.pi / 4])
+ goal_body = PushTEnv.get_goal_pose_body(goal_pos_angle)
+
+ for i in range(num_frames):
+ space = pymunk.Space()
+ space.gravity = 0, 0
+ space.damping = 0
+
+ # Add walls.
+ walls = [
+ PushTEnv.add_segment(space, (5, 506), (5, 5), 2),
+ PushTEnv.add_segment(space, (5, 5), (506, 5), 2),
+ PushTEnv.add_segment(space, (506, 5), (506, 506), 2),
+ PushTEnv.add_segment(space, (5, 506), (506, 506), 2),
+ ]
+ space.add(*walls)
+
+ block_body, block_shapes = PushTEnv.add_tee(space, block_pos[i].tolist(), block_angle[i].item())
+ goal_geom = pymunk_to_shapely(goal_body, block_body.shapes)
+ block_geom = pymunk_to_shapely(block_body, block_body.shapes)
+ intersection_area = goal_geom.intersection(block_geom).area
+ goal_area = goal_geom.area
+ coverage[i] = intersection_area / goal_area
+ keypoints[i] = torch.from_numpy(PushTEnv.get_keypoints(block_shapes).flatten())
+
+ return coverage, keypoints
+
+
+def calculate_success(coverage: float, success_threshold: float):
+ return coverage > success_threshold
+
+
+def calculate_reward(coverage: float, success_threshold: float):
+ return np.clip(coverage / success_threshold, 0, 1)
+
+
+def main(raw_dir: Path, repo_id: str, mode: str = "video", push_to_hub: bool = True):
+ if mode not in ["video", "image", "keypoints"]:
+ raise ValueError(mode)
+
+ if (LEROBOT_HOME / repo_id).exists():
+ shutil.rmtree(LEROBOT_HOME / repo_id)
+
+ if not raw_dir.exists():
+ download_raw(raw_dir, repo_id="lerobot-raw/pusht_raw")
+
+ zarr_data = load_raw_dataset(zarr_path=raw_dir / "pusht_cchi_v7_replay.zarr")
+
+ env_state = zarr_data["state"][:]
+ agent_pos = env_state[:, :2]
+
+ action = zarr_data["action"][:]
+ image = zarr_data["img"] # (b, h, w, c)
+
+ episode_data_index = {
+ "from": np.concatenate(([0], zarr_data.meta["episode_ends"][:-1])),
+ "to": zarr_data.meta["episode_ends"],
+ }
+
+ # Calculate success and reward based on the overlapping area
+ # of the T-object and the T-area.
+ coverage, keypoints = calculate_coverage(zarr_data)
+ success = calculate_success(coverage, success_threshold=0.95)
+ reward = calculate_reward(coverage, success_threshold=0.95)
+
+ features = build_features(mode)
+ dataset = LeRobotDataset.create(
+ repo_id=repo_id,
+ fps=10,
+ robot_type="2d pointer",
+ features=features,
+ image_writer_threads=4,
+ )
+ episodes = range(len(episode_data_index["from"]))
+ for ep_idx in episodes:
+ from_idx = episode_data_index["from"][ep_idx]
+ to_idx = episode_data_index["to"][ep_idx]
+ num_frames = to_idx - from_idx
+
+ for frame_idx in range(num_frames):
+ i = from_idx + frame_idx
+ frame = {
+ "action": torch.from_numpy(action[i]),
+ # Shift reward and success by +1 until the last item of the episode
+ "next.reward": reward[i + (frame_idx < num_frames - 1)],
+ "next.success": success[i + (frame_idx < num_frames - 1)],
+ }
+
+ frame["observation.state"] = torch.from_numpy(agent_pos[i])
+
+ if mode == "keypoints":
+ frame["observation.environment_state"] = torch.from_numpy(keypoints[i])
+ else:
+ frame["observation.image"] = torch.from_numpy(image[i])
+
+ dataset.add_frame(frame)
+
+ dataset.save_episode(task=PUSHT_TASK)
+
+ dataset.consolidate()
+
+ if push_to_hub:
+ dataset.push_to_hub()
+
+
+if __name__ == "__main__":
+ # To try this script, modify the repo id with your own HuggingFace user (e.g cadene/pusht)
+ repo_id = "lerobot/pusht"
+
+ modes = ["video", "image", "keypoints"]
+ # Uncomment if you want to try with a specific mode
+ # modes = ["video"]
+ # modes = ["image"]
+ # modes = ["keypoints"]
+
+ raw_dir = Path("data/lerobot-raw/pusht_raw")
+ for mode in modes:
+ if mode in ["image", "keypoints"]:
+ repo_id += f"_{mode}"
+
+ # download and load raw dataset, create LeRobotDataset, populate it, push to hub
+ main(raw_dir, repo_id=repo_id, mode=mode)
+
+ # Uncomment if you want to load the local dataset and explore it
+ # dataset = LeRobotDataset(repo_id=repo_id, local_files_only=True)
+ # breakpoint()
diff --git a/lerobot/__init__.py b/lerobot/__init__.py
index aeae31008..3d5bb6aaa 100644
--- a/lerobot/__init__.py
+++ b/lerobot/__init__.py
@@ -28,6 +28,8 @@
print(lerobot.available_policies)
print(lerobot.available_policies_per_env)
print(lerobot.available_robots)
+ print(lerobot.available_cameras)
+ print(lerobot.available_motors)
```
When implementing a new dataset loadable with LeRobotDataset follow these steps:
@@ -179,8 +181,8 @@
"lerobot/usc_cloth_sim",
]
-available_datasets = list(
- itertools.chain(*available_datasets_per_env.values(), available_real_world_datasets)
+available_datasets = sorted(
+ set(itertools.chain(*available_datasets_per_env.values(), available_real_world_datasets))
)
# lists all available policies from `lerobot/common/policies`
@@ -196,6 +198,20 @@
"koch",
"koch_bimanual",
"aloha",
+ "so100",
+ "moss",
+]
+
+# lists all available cameras from `lerobot/common/robot_devices/cameras`
+available_cameras = [
+ "opencv",
+ "intelrealsense",
+]
+
+# lists all available motors from `lerobot/common/robot_devices/motors`
+available_motors = [
+ "dynamixel",
+ "feetech",
]
# keys and values refer to yaml files
@@ -203,7 +219,9 @@
"aloha": ["act"],
"pusht": ["diffusion", "vqbet"],
"xarm": ["tdmpc"],
- "dora_aloha_real": ["act_real"],
+ "koch_real": ["act_koch_real"],
+ "aloha_real": ["act_aloha_real"],
+ "dora_aloha_real": ["act_aloha_real"],
}
env_task_pairs = [(env, task) for env, tasks in available_tasks_per_env.items() for task in tasks]
diff --git a/lerobot/common/datasets/card_template.md b/lerobot/common/datasets/card_template.md
new file mode 100644
index 000000000..7ee27df95
--- /dev/null
+++ b/lerobot/common/datasets/card_template.md
@@ -0,0 +1,27 @@
+---
+# For reference on dataset card metadata, see the spec: https://github.com/huggingface/hub-docs/blob/main/datasetcard.md?plain=1
+# Doc / guide: https://huggingface.co/docs/hub/datasets-cards
+{{ card_data }}
+---
+
+This dataset was created using [LeRobot](https://github.com/huggingface/lerobot).
+
+## Dataset Description
+
+{{ dataset_description | default("", true) }}
+
+- **Homepage:** {{ url | default("[More Information Needed]", true)}}
+- **Paper:** {{ paper | default("[More Information Needed]", true)}}
+- **License:** {{ license | default("[More Information Needed]", true)}}
+
+## Dataset Structure
+
+{{ dataset_structure | default("[More Information Needed]", true)}}
+
+## Citation
+
+**BibTeX:**
+
+```bibtex
+{{ citation_bibtex | default("[More Information Needed]", true)}}
+```
diff --git a/lerobot/common/datasets/compute_stats.py b/lerobot/common/datasets/compute_stats.py
index 208284465..c62116994 100644
--- a/lerobot/common/datasets/compute_stats.py
+++ b/lerobot/common/datasets/compute_stats.py
@@ -19,9 +19,6 @@
import einops
import torch
import tqdm
-from datasets import Image
-
-from lerobot.common.datasets.video_utils import VideoFrame
def get_stats_einops_patterns(dataset, num_workers=0):
@@ -39,15 +36,13 @@ def get_stats_einops_patterns(dataset, num_workers=0):
batch = next(iter(dataloader))
stats_patterns = {}
- for key, feats_type in dataset.features.items():
- # NOTE: skip language_instruction embedding in stats computation
- if key == "language_instruction":
- continue
+ for key in dataset.features:
# sanity check that tensors are not float64
assert batch[key].dtype != torch.float64
- if isinstance(feats_type, (VideoFrame, Image)):
+ # if isinstance(feats_type, (VideoFrame, Image)):
+ if key in dataset.meta.camera_keys:
# sanity check that images are channel first
_, c, h, w = batch[key].shape
assert c < h and c < w, f"expect channel first images, but instead {batch[key].shape}"
@@ -63,12 +58,12 @@ def get_stats_einops_patterns(dataset, num_workers=0):
elif batch[key].ndim == 1:
stats_patterns[key] = "b -> 1"
else:
- raise ValueError(f"{key}, {feats_type}, {batch[key].shape}")
+ raise ValueError(f"{key}, {batch[key].shape}")
return stats_patterns
-def compute_stats(dataset, batch_size=32, num_workers=16, max_num_samples=None):
+def compute_stats(dataset, batch_size=8, num_workers=8, max_num_samples=None):
"""Compute mean/std and min/max statistics of all data keys in a LeRobotDataset."""
if max_num_samples is None:
max_num_samples = len(dataset)
@@ -175,39 +170,45 @@ def aggregate_stats(ls_datasets) -> dict[str, torch.Tensor]:
"""
data_keys = set()
for dataset in ls_datasets:
- data_keys.update(dataset.stats.keys())
+ data_keys.update(dataset.meta.stats.keys())
stats = {k: {} for k in data_keys}
for data_key in data_keys:
for stat_key in ["min", "max"]:
# compute `max(dataset_0["max"], dataset_1["max"], ...)`
stats[data_key][stat_key] = einops.reduce(
- torch.stack([d.stats[data_key][stat_key] for d in ls_datasets if data_key in d.stats], dim=0),
+ torch.stack(
+ [ds.meta.stats[data_key][stat_key] for ds in ls_datasets if data_key in ds.meta.stats],
+ dim=0,
+ ),
"n ... -> ...",
stat_key,
)
- total_samples = sum(d.num_samples for d in ls_datasets if data_key in d.stats)
+ total_samples = sum(d.num_frames for d in ls_datasets if data_key in d.meta.stats)
# Compute the "sum" statistic by multiplying each mean by the number of samples in the respective
# dataset, then divide by total_samples to get the overall "mean".
- # NOTE: the brackets around (d.num_samples / total_samples) are needed tor minimize the risk of
+ # NOTE: the brackets around (d.num_frames / total_samples) are needed tor minimize the risk of
# numerical overflow!
stats[data_key]["mean"] = sum(
- d.stats[data_key]["mean"] * (d.num_samples / total_samples)
+ d.meta.stats[data_key]["mean"] * (d.num_frames / total_samples)
for d in ls_datasets
- if data_key in d.stats
+ if data_key in d.meta.stats
)
# The derivation for standard deviation is a little more involved but is much in the same spirit as
# the computation of the mean.
# Given two sets of data where the statistics are known:
# σ_combined = sqrt[ (n1 * (σ1^2 + d1^2) + n2 * (σ2^2 + d2^2)) / (n1 + n2) ]
# where d1 = μ1 - μ_combined, d2 = μ2 - μ_combined
- # NOTE: the brackets around (d.num_samples / total_samples) are needed tor minimize the risk of
+ # NOTE: the brackets around (d.num_frames / total_samples) are needed tor minimize the risk of
# numerical overflow!
stats[data_key]["std"] = torch.sqrt(
sum(
- (d.stats[data_key]["std"] ** 2 + (d.stats[data_key]["mean"] - stats[data_key]["mean"]) ** 2)
- * (d.num_samples / total_samples)
+ (
+ d.meta.stats[data_key]["std"] ** 2
+ + (d.meta.stats[data_key]["mean"] - stats[data_key]["mean"]) ** 2
+ )
+ * (d.num_frames / total_samples)
for d in ls_datasets
- if data_key in d.stats
+ if data_key in d.meta.stats
)
)
return stats
diff --git a/lerobot/common/datasets/factory.py b/lerobot/common/datasets/factory.py
index 96a353fbf..f6164ed1d 100644
--- a/lerobot/common/datasets/factory.py
+++ b/lerobot/common/datasets/factory.py
@@ -91,9 +91,9 @@ def make_dataset(cfg, split: str = "train") -> LeRobotDataset | MultiLeRobotData
)
if isinstance(cfg.dataset_repo_id, str):
+ # TODO (aliberts): add 'episodes' arg from config after removing hydra
dataset = LeRobotDataset(
cfg.dataset_repo_id,
- split=split,
delta_timestamps=cfg.training.get("delta_timestamps"),
image_transforms=image_transforms,
video_backend=cfg.video_backend,
@@ -101,7 +101,6 @@ def make_dataset(cfg, split: str = "train") -> LeRobotDataset | MultiLeRobotData
else:
dataset = MultiLeRobotDataset(
cfg.dataset_repo_id,
- split=split,
delta_timestamps=cfg.training.get("delta_timestamps"),
image_transforms=image_transforms,
video_backend=cfg.video_backend,
@@ -112,6 +111,6 @@ def make_dataset(cfg, split: str = "train") -> LeRobotDataset | MultiLeRobotData
for stats_type, listconfig in stats_dict.items():
# example of stats_type: min, max, mean, std
stats = OmegaConf.to_container(listconfig, resolve=True)
- dataset.stats[key][stats_type] = torch.tensor(stats, dtype=torch.float32)
+ dataset.meta.stats[key][stats_type] = torch.tensor(stats, dtype=torch.float32)
return dataset
diff --git a/lerobot/common/datasets/image_writer.py b/lerobot/common/datasets/image_writer.py
new file mode 100644
index 000000000..9564fb591
--- /dev/null
+++ b/lerobot/common/datasets/image_writer.py
@@ -0,0 +1,160 @@
+#!/usr/bin/env python
+
+# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import multiprocessing
+import queue
+import threading
+from pathlib import Path
+
+import numpy as np
+import PIL.Image
+import torch
+
+
+def safe_stop_image_writer(func):
+ def wrapper(*args, **kwargs):
+ try:
+ return func(*args, **kwargs)
+ except Exception as e:
+ dataset = kwargs.get("dataset", None)
+ image_writer = getattr(dataset, "image_writer", None) if dataset else None
+ if image_writer is not None:
+ print("Waiting for image writer to terminate...")
+ image_writer.stop()
+ raise e
+
+ return wrapper
+
+
+def image_array_to_image(image_array: np.ndarray) -> PIL.Image.Image:
+ # TODO(aliberts): handle 1 channel and 4 for depth images
+ if image_array.ndim == 3 and image_array.shape[0] in [1, 3]:
+ # Transpose from pytorch convention (C, H, W) to (H, W, C)
+ image_array = image_array.transpose(1, 2, 0)
+ if image_array.dtype != np.uint8:
+ # Assume the image is in [0, 1] range for floating-point data
+ image_array = np.clip(image_array, 0, 1)
+ image_array = (image_array * 255).astype(np.uint8)
+ return PIL.Image.fromarray(image_array)
+
+
+def write_image(image: np.ndarray | PIL.Image.Image, fpath: Path):
+ try:
+ if isinstance(image, np.ndarray):
+ img = image_array_to_image(image)
+ elif isinstance(image, PIL.Image.Image):
+ img = image
+ else:
+ raise TypeError(f"Unsupported image type: {type(image)}")
+ img.save(fpath)
+ except Exception as e:
+ print(f"Error writing image {fpath}: {e}")
+
+
+def worker_thread_loop(queue: queue.Queue):
+ while True:
+ item = queue.get()
+ if item is None:
+ queue.task_done()
+ break
+ image_array, fpath = item
+ write_image(image_array, fpath)
+ queue.task_done()
+
+
+def worker_process(queue: queue.Queue, num_threads: int):
+ threads = []
+ for _ in range(num_threads):
+ t = threading.Thread(target=worker_thread_loop, args=(queue,))
+ t.daemon = True
+ t.start()
+ threads.append(t)
+ for t in threads:
+ t.join()
+
+
+class AsyncImageWriter:
+ """
+ This class abstract away the initialisation of processes or/and threads to
+ save images on disk asynchrounously, which is critical to control a robot and record data
+ at a high frame rate.
+
+ When `num_processes=0`, it creates a threads pool of size `num_threads`.
+ When `num_processes>0`, it creates processes pool of size `num_processes`, where each subprocess starts
+ their own threads pool of size `num_threads`.
+
+ The optimal number of processes and threads depends on your computer capabilities.
+ We advise to use 4 threads per camera with 0 processes. If the fps is not stable, try to increase or lower
+ the number of threads. If it is still not stable, try to use 1 subprocess, or more.
+ """
+
+ def __init__(self, num_processes: int = 0, num_threads: int = 1):
+ self.num_processes = num_processes
+ self.num_threads = num_threads
+ self.queue = None
+ self.threads = []
+ self.processes = []
+ self._stopped = False
+
+ if num_threads <= 0 and num_processes <= 0:
+ raise ValueError("Number of threads and processes must be greater than zero.")
+
+ if self.num_processes == 0:
+ # Use threading
+ self.queue = queue.Queue()
+ for _ in range(self.num_threads):
+ t = threading.Thread(target=worker_thread_loop, args=(self.queue,))
+ t.daemon = True
+ t.start()
+ self.threads.append(t)
+ else:
+ # Use multiprocessing
+ self.queue = multiprocessing.JoinableQueue()
+ for _ in range(self.num_processes):
+ p = multiprocessing.Process(target=worker_process, args=(self.queue, self.num_threads))
+ p.daemon = True
+ p.start()
+ self.processes.append(p)
+
+ def save_image(self, image: torch.Tensor | np.ndarray | PIL.Image.Image, fpath: Path):
+ if isinstance(image, torch.Tensor):
+ # Convert tensor to numpy array to minimize main process time
+ image = image.cpu().numpy()
+ self.queue.put((image, fpath))
+
+ def wait_until_done(self):
+ self.queue.join()
+
+ def stop(self):
+ if self._stopped:
+ return
+
+ if self.num_processes == 0:
+ for _ in self.threads:
+ self.queue.put(None)
+ for t in self.threads:
+ t.join()
+ else:
+ num_nones = self.num_processes * self.num_threads
+ for _ in range(num_nones):
+ self.queue.put(None)
+ for p in self.processes:
+ p.join()
+ if p.is_alive():
+ p.terminate()
+ self.queue.close()
+ self.queue.join_thread()
+
+ self._stopped = True
diff --git a/lerobot/common/datasets/lerobot_dataset.py b/lerobot/common/datasets/lerobot_dataset.py
index eb76f78d6..b32cf7095 100644
--- a/lerobot/common/datasets/lerobot_dataset.py
+++ b/lerobot/common/datasets/lerobot_dataset.py
@@ -15,202 +15,946 @@
# limitations under the License.
import logging
import os
+import shutil
+from functools import cached_property
from pathlib import Path
from typing import Callable
import datasets
+import numpy as np
+import PIL.Image
import torch
import torch.utils
+from datasets import load_dataset
+from huggingface_hub import create_repo, snapshot_download, upload_folder
-from lerobot.common.datasets.compute_stats import aggregate_stats
+from lerobot.common.datasets.compute_stats import aggregate_stats, compute_stats
+from lerobot.common.datasets.image_writer import AsyncImageWriter, write_image
from lerobot.common.datasets.utils import (
- calculate_episode_data_index,
- load_episode_data_index,
- load_hf_dataset,
+ DEFAULT_FEATURES,
+ DEFAULT_IMAGE_PATH,
+ EPISODES_PATH,
+ INFO_PATH,
+ STATS_PATH,
+ TASKS_PATH,
+ append_jsonlines,
+ check_delta_timestamps,
+ check_timestamps_sync,
+ check_version_compatibility,
+ create_branch,
+ create_empty_dataset_info,
+ create_lerobot_dataset_card,
+ get_delta_indices,
+ get_episode_data_index,
+ get_features_from_robot,
+ get_hf_features_from_features,
+ get_hub_safe_version,
+ hf_transform_to_torch,
+ load_episodes,
load_info,
- load_previous_and_future_frames,
load_stats,
- load_videos,
- reset_episode_index,
+ load_tasks,
+ serialize_dict,
+ write_json,
+ write_parquet,
)
-from lerobot.common.datasets.video_utils import VideoFrame, load_from_videos
+from lerobot.common.datasets.video_utils import (
+ VideoFrame,
+ decode_video_frames_torchvision,
+ encode_video_frames,
+ get_video_info,
+)
+from lerobot.common.robot_devices.robots.utils import Robot
# For maintainers, see lerobot/common/datasets/push_dataset_to_hub/CODEBASE_VERSION.md
-CODEBASE_VERSION = "v1.6"
-DATA_DIR = Path(os.environ["DATA_DIR"]) if "DATA_DIR" in os.environ else None
+CODEBASE_VERSION = "v2.0"
+LEROBOT_HOME = Path(os.getenv("LEROBOT_HOME", "~/.cache/huggingface/lerobot")).expanduser()
+
+
+class LeRobotDatasetMetadata:
+ def __init__(
+ self,
+ repo_id: str,
+ root: str | Path | None = None,
+ local_files_only: bool = False,
+ ):
+ self.repo_id = repo_id
+ self.root = Path(root) if root is not None else LEROBOT_HOME / repo_id
+ self.local_files_only = local_files_only
+
+ # Load metadata
+ (self.root / "meta").mkdir(exist_ok=True, parents=True)
+ self.pull_from_repo(allow_patterns="meta/")
+ self.info = load_info(self.root)
+ self.stats = load_stats(self.root)
+ self.tasks = load_tasks(self.root)
+ self.episodes = load_episodes(self.root)
+
+ def pull_from_repo(
+ self,
+ allow_patterns: list[str] | str | None = None,
+ ignore_patterns: list[str] | str | None = None,
+ ) -> None:
+ snapshot_download(
+ self.repo_id,
+ repo_type="dataset",
+ revision=self._hub_version,
+ local_dir=self.root,
+ allow_patterns=allow_patterns,
+ ignore_patterns=ignore_patterns,
+ local_files_only=self.local_files_only,
+ )
+
+ @cached_property
+ def _hub_version(self) -> str | None:
+ return None if self.local_files_only else get_hub_safe_version(self.repo_id, CODEBASE_VERSION)
+
+ @property
+ def _version(self) -> str:
+ """Codebase version used to create this dataset."""
+ return self.info["codebase_version"]
+
+ def get_data_file_path(self, ep_index: int) -> Path:
+ ep_chunk = self.get_episode_chunk(ep_index)
+ fpath = self.data_path.format(episode_chunk=ep_chunk, episode_index=ep_index)
+ return Path(fpath)
+
+ def get_video_file_path(self, ep_index: int, vid_key: str) -> Path:
+ ep_chunk = self.get_episode_chunk(ep_index)
+ fpath = self.video_path.format(episode_chunk=ep_chunk, video_key=vid_key, episode_index=ep_index)
+ return Path(fpath)
+
+ def get_episode_chunk(self, ep_index: int) -> int:
+ return ep_index // self.chunks_size
+
+ @property
+ def data_path(self) -> str:
+ """Formattable string for the parquet files."""
+ return self.info["data_path"]
+
+ @property
+ def video_path(self) -> str | None:
+ """Formattable string for the video files."""
+ return self.info["video_path"]
+
+ @property
+ def robot_type(self) -> str | None:
+ """Robot type used in recording this dataset."""
+ return self.info["robot_type"]
+
+ @property
+ def fps(self) -> int:
+ """Frames per second used during data collection."""
+ return self.info["fps"]
+
+ @property
+ def features(self) -> dict[str, dict]:
+ """All features contained in the dataset."""
+ return self.info["features"]
+
+ @property
+ def image_keys(self) -> list[str]:
+ """Keys to access visual modalities stored as images."""
+ return [key for key, ft in self.features.items() if ft["dtype"] == "image"]
+
+ @property
+ def video_keys(self) -> list[str]:
+ """Keys to access visual modalities stored as videos."""
+ return [key for key, ft in self.features.items() if ft["dtype"] == "video"]
+
+ @property
+ def camera_keys(self) -> list[str]:
+ """Keys to access visual modalities (regardless of their storage method)."""
+ return [key for key, ft in self.features.items() if ft["dtype"] in ["video", "image"]]
+
+ @property
+ def names(self) -> dict[str, list | dict]:
+ """Names of the various dimensions of vector modalities."""
+ return {key: ft["names"] for key, ft in self.features.items()}
+
+ @property
+ def shapes(self) -> dict:
+ """Shapes for the different features."""
+ return {key: tuple(ft["shape"]) for key, ft in self.features.items()}
+
+ @property
+ def total_episodes(self) -> int:
+ """Total number of episodes available."""
+ return self.info["total_episodes"]
+
+ @property
+ def total_frames(self) -> int:
+ """Total number of frames saved in this dataset."""
+ return self.info["total_frames"]
+
+ @property
+ def total_tasks(self) -> int:
+ """Total number of different tasks performed in this dataset."""
+ return self.info["total_tasks"]
+
+ @property
+ def total_chunks(self) -> int:
+ """Total number of chunks (groups of episodes)."""
+ return self.info["total_chunks"]
+
+ @property
+ def chunks_size(self) -> int:
+ """Max number of episodes per chunk."""
+ return self.info["chunks_size"]
+
+ @property
+ def task_to_task_index(self) -> dict:
+ return {task: task_idx for task_idx, task in self.tasks.items()}
+
+ def get_task_index(self, task: str) -> int:
+ """
+ Given a task in natural language, returns its task_index if the task already exists in the dataset,
+ otherwise creates a new task_index.
+ """
+ task_index = self.task_to_task_index.get(task, None)
+ return task_index if task_index is not None else self.total_tasks
+
+ def save_episode(self, episode_index: int, episode_length: int, task: str, task_index: int) -> None:
+ self.info["total_episodes"] += 1
+ self.info["total_frames"] += episode_length
+
+ if task_index not in self.tasks:
+ self.info["total_tasks"] += 1
+ self.tasks[task_index] = task
+ task_dict = {
+ "task_index": task_index,
+ "task": task,
+ }
+ append_jsonlines(task_dict, self.root / TASKS_PATH)
+
+ chunk = self.get_episode_chunk(episode_index)
+ if chunk >= self.total_chunks:
+ self.info["total_chunks"] += 1
+
+ self.info["splits"] = {"train": f"0:{self.info['total_episodes']}"}
+ self.info["total_videos"] += len(self.video_keys)
+ write_json(self.info, self.root / INFO_PATH)
+
+ episode_dict = {
+ "episode_index": episode_index,
+ "tasks": [task],
+ "length": episode_length,
+ }
+ self.episodes.append(episode_dict)
+ append_jsonlines(episode_dict, self.root / EPISODES_PATH)
+
+ # TODO(aliberts): refactor stats in save_episodes
+ # image_sampling = int(self.fps / 2) # sample 2 img/s for the stats
+ # ep_stats = compute_episode_stats(episode_buffer, self.features, episode_length, image_sampling=image_sampling)
+ # ep_stats = serialize_dict(ep_stats)
+ # append_jsonlines(ep_stats, self.root / STATS_PATH)
+
+ def write_video_info(self) -> None:
+ """
+ Warning: this function writes info from first episode videos, implicitly assuming that all videos have
+ been encoded the same way. Also, this means it assumes the first episode exists.
+ """
+ for key in self.video_keys:
+ if not self.features[key].get("info", None):
+ video_path = self.root / self.get_video_file_path(ep_index=0, vid_key=key)
+ self.info["features"][key]["info"] = get_video_info(video_path)
+
+ write_json(self.info, self.root / INFO_PATH)
+
+ def __repr__(self):
+ feature_keys = list(self.features)
+ return (
+ f"{self.__class__.__name__}({{\n"
+ f" Repository ID: '{self.repo_id}',\n"
+ f" Total episodes: '{self.total_episodes}',\n"
+ f" Total frames: '{self.total_frames}',\n"
+ f" Features: '{feature_keys}',\n"
+ "})',\n"
+ )
+
+ @classmethod
+ def create(
+ cls,
+ repo_id: str,
+ fps: int,
+ root: str | Path | None = None,
+ robot: Robot | None = None,
+ robot_type: str | None = None,
+ features: dict | None = None,
+ use_videos: bool = True,
+ ) -> "LeRobotDatasetMetadata":
+ """Creates metadata for a LeRobotDataset."""
+ obj = cls.__new__(cls)
+ obj.repo_id = repo_id
+ obj.root = Path(root) if root is not None else LEROBOT_HOME / repo_id
+
+ obj.root.mkdir(parents=True, exist_ok=False)
+
+ if robot is not None:
+ features = get_features_from_robot(robot, use_videos)
+ robot_type = robot.robot_type
+ if not all(cam.fps == fps for cam in robot.cameras.values()):
+ logging.warning(
+ f"Some cameras in your {robot.robot_type} robot don't have an fps matching the fps of your dataset."
+ "In this case, frames from lower fps cameras will be repeated to fill in the blanks."
+ )
+ elif features is None:
+ raise ValueError(
+ "Dataset features must either come from a Robot or explicitly passed upon creation."
+ )
+ else:
+ # TODO(aliberts, rcadene): implement sanity check for features
+ features = {**features, **DEFAULT_FEATURES}
+
+ obj.tasks, obj.stats, obj.episodes = {}, {}, []
+ obj.info = create_empty_dataset_info(CODEBASE_VERSION, fps, robot_type, features, use_videos)
+ if len(obj.video_keys) > 0 and not use_videos:
+ raise ValueError()
+ write_json(obj.info, obj.root / INFO_PATH)
+ obj.local_files_only = True
+ return obj
class LeRobotDataset(torch.utils.data.Dataset):
def __init__(
self,
repo_id: str,
- root: Path | None = DATA_DIR,
- split: str = "train",
+ root: str | Path | None = None,
+ episodes: list[int] | None = None,
image_transforms: Callable | None = None,
delta_timestamps: dict[list[float]] | None = None,
+ tolerance_s: float = 1e-4,
+ download_videos: bool = True,
+ local_files_only: bool = False,
video_backend: str | None = None,
):
+ """
+ 2 modes are available for instantiating this class, depending on 2 different use cases:
+
+ 1. Your dataset already exists:
+ - On your local disk in the 'root' folder. This is typically the case when you recorded your
+ dataset locally and you may or may not have pushed it to the hub yet. Instantiating this class
+ with 'root' will load your dataset directly from disk. This can happen while you're offline (no
+ internet connection), in that case, use local_files_only=True.
+
+ - On the Hugging Face Hub at the address https://huggingface.co/datasets/{repo_id} and not on
+ your local disk in the 'root' folder. Instantiating this class with this 'repo_id' will download
+ the dataset from that address and load it, pending your dataset is compliant with
+ codebase_version v2.0. If your dataset has been created before this new format, you will be
+ prompted to convert it using our conversion script from v1.6 to v2.0, which you can find at
+ lerobot/common/datasets/v2/convert_dataset_v1_to_v2.py.
+
+
+ 2. Your dataset doesn't already exists (either on local disk or on the Hub): you can create an empty
+ LeRobotDataset with the 'create' classmethod. This can be used for recording a dataset or port an
+ existing dataset to the LeRobotDataset format.
+
+
+ In terms of files, LeRobotDataset encapsulates 3 main things:
+ - metadata:
+ - info contains various information about the dataset like shapes, keys, fps etc.
+ - stats stores the dataset statistics of the different modalities for normalization
+ - tasks contains the prompts for each task of the dataset, which can be used for
+ task-conditionned training.
+ - hf_dataset (from datasets.Dataset), which will read any values from parquet files.
+ - videos (optional) from which frames are loaded to be synchronous with data from parquet files.
+
+ A typical LeRobotDataset looks like this from its root path:
+ .
+ ├── data
+ │ ├── chunk-000
+ │ │ ├── episode_000000.parquet
+ │ │ ├── episode_000001.parquet
+ │ │ ├── episode_000002.parquet
+ │ │ └── ...
+ │ ├── chunk-001
+ │ │ ├── episode_001000.parquet
+ │ │ ├── episode_001001.parquet
+ │ │ ├── episode_001002.parquet
+ │ │ └── ...
+ │ └── ...
+ ├── meta
+ │ ├── episodes.jsonl
+ │ ├── info.json
+ │ ├── stats.json
+ │ └── tasks.jsonl
+ └── videos
+ ├── chunk-000
+ │ ├── observation.images.laptop
+ │ │ ├── episode_000000.mp4
+ │ │ ├── episode_000001.mp4
+ │ │ ├── episode_000002.mp4
+ │ │ └── ...
+ │ ├── observation.images.phone
+ │ │ ├── episode_000000.mp4
+ │ │ ├── episode_000001.mp4
+ │ │ ├── episode_000002.mp4
+ │ │ └── ...
+ ├── chunk-001
+ └── ...
+
+ Note that this file-based structure is designed to be as versatile as possible. The files are split by
+ episodes which allows a more granular control over which episodes one wants to use and download. The
+ structure of the dataset is entirely described in the info.json file, which can be easily downloaded
+ or viewed directly on the hub before downloading any actual data. The type of files used are very
+ simple and do not need complex tools to be read, it only uses .parquet, .json and .mp4 files (and .md
+ for the README).
+
+ Args:
+ repo_id (str): This is the repo id that will be used to fetch the dataset. Locally, the dataset
+ will be stored under root/repo_id.
+ root (Path | None, optional): Local directory to use for downloading/writing files. You can also
+ set the LEROBOT_HOME environment variable to point to a different location. Defaults to
+ '~/.cache/huggingface/lerobot'.
+ episodes (list[int] | None, optional): If specified, this will only load episodes specified by
+ their episode_index in this list. Defaults to None.
+ image_transforms (Callable | None, optional): You can pass standard v2 image transforms from
+ torchvision.transforms.v2 here which will be applied to visual modalities (whether they come
+ from videos or images). Defaults to None.
+ delta_timestamps (dict[list[float]] | None, optional): _description_. Defaults to None.
+ tolerance_s (float, optional): Tolerance in seconds used to ensure data timestamps are actually in
+ sync with the fps value. It is used at the init of the dataset to make sure that each
+ timestamps is separated to the next by 1/fps +/- tolerance_s. This also applies to frames
+ decoded from video files. It is also used to check that `delta_timestamps` (when provided) are
+ multiples of 1/fps. Defaults to 1e-4.
+ download_videos (bool, optional): Flag to download the videos. Note that when set to True but the
+ video files are already present on local disk, they won't be downloaded again. Defaults to
+ True.
+ local_files_only (bool, optional): Flag to use local files only. If True, no requests to the hub
+ will be made. Defaults to False.
+ video_backend (str | None, optional): Video backend to use for decoding videos. There is currently
+ a single option which is the pyav decoder used by Torchvision. Defaults to pyav.
+ """
super().__init__()
self.repo_id = repo_id
- self.root = root
- self.split = split
+ self.root = Path(root) if root else LEROBOT_HOME / repo_id
self.image_transforms = image_transforms
self.delta_timestamps = delta_timestamps
- # load data from hub or locally when root is provided
+ self.episodes = episodes
+ self.tolerance_s = tolerance_s
+ self.video_backend = video_backend if video_backend else "pyav"
+ self.delta_indices = None
+ self.local_files_only = local_files_only
+
+ # Unused attributes
+ self.image_writer = None
+ self.episode_buffer = None
+
+ self.root.mkdir(exist_ok=True, parents=True)
+
+ # Load metadata
+ self.meta = LeRobotDatasetMetadata(self.repo_id, self.root, self.local_files_only)
+
+ # Check version
+ check_version_compatibility(self.repo_id, self.meta._version, CODEBASE_VERSION)
+
+ # Load actual data
+ self.download_episodes(download_videos)
+ self.hf_dataset = self.load_hf_dataset()
+ self.episode_data_index = get_episode_data_index(self.meta.episodes, self.episodes)
+
+ # Check timestamps
+ check_timestamps_sync(self.hf_dataset, self.episode_data_index, self.fps, self.tolerance_s)
+
+ # Setup delta_indices
+ if self.delta_timestamps is not None:
+ check_delta_timestamps(self.delta_timestamps, self.fps, self.tolerance_s)
+ self.delta_indices = get_delta_indices(self.delta_timestamps, self.fps)
+
+ # Available stats implies all videos have been encoded and dataset is iterable
+ self.consolidated = self.meta.stats is not None
+
+ def push_to_hub(
+ self,
+ tags: list | None = None,
+ license: str | None = "apache-2.0",
+ push_videos: bool = True,
+ private: bool = False,
+ **card_kwargs,
+ ) -> None:
+ if not self.consolidated:
+ logging.warning(
+ "You are trying to upload to the hub a LeRobotDataset that has not been consolidated yet. "
+ "Consolidating first."
+ )
+ self.consolidate()
+
+ ignore_patterns = ["images/"]
+ if not push_videos:
+ ignore_patterns.append("videos/")
+
+ create_repo(
+ repo_id=self.repo_id,
+ private=private,
+ repo_type="dataset",
+ exist_ok=True,
+ )
+
+ upload_folder(
+ repo_id=self.repo_id,
+ folder_path=self.root,
+ repo_type="dataset",
+ ignore_patterns=ignore_patterns,
+ )
+ card = create_lerobot_dataset_card(
+ tags=tags, dataset_info=self.meta.info, license=license, **card_kwargs
+ )
+ card.push_to_hub(repo_id=self.repo_id, repo_type="dataset")
+ create_branch(repo_id=self.repo_id, branch=CODEBASE_VERSION, repo_type="dataset")
+
+ def pull_from_repo(
+ self,
+ allow_patterns: list[str] | str | None = None,
+ ignore_patterns: list[str] | str | None = None,
+ ) -> None:
+ snapshot_download(
+ self.repo_id,
+ repo_type="dataset",
+ revision=self.meta._hub_version,
+ local_dir=self.root,
+ allow_patterns=allow_patterns,
+ ignore_patterns=ignore_patterns,
+ local_files_only=self.local_files_only,
+ )
+
+ def download_episodes(self, download_videos: bool = True) -> None:
+ """Downloads the dataset from the given 'repo_id' at the provided version. If 'episodes' is given, this
+ will only download those episodes (selected by their episode_index). If 'episodes' is None, the whole
+ dataset will be downloaded. Thanks to the behavior of snapshot_download, if the files are already present
+ in 'local_dir', they won't be downloaded again.
+ """
# TODO(rcadene, aliberts): implement faster transfer
# https://huggingface.co/docs/huggingface_hub/en/guides/download#faster-downloads
- self.hf_dataset = load_hf_dataset(repo_id, CODEBASE_VERSION, root, split)
- if split == "train":
- self.episode_data_index = load_episode_data_index(repo_id, CODEBASE_VERSION, root)
+ files = None
+ ignore_patterns = None if download_videos else "videos/"
+ if self.episodes is not None:
+ files = [str(self.meta.get_data_file_path(ep_idx)) for ep_idx in self.episodes]
+ if len(self.meta.video_keys) > 0 and download_videos:
+ video_files = [
+ str(self.meta.get_video_file_path(ep_idx, vid_key))
+ for vid_key in self.meta.video_keys
+ for ep_idx in self.episodes
+ ]
+ files += video_files
+
+ self.pull_from_repo(allow_patterns=files, ignore_patterns=ignore_patterns)
+
+ def load_hf_dataset(self) -> datasets.Dataset:
+ """hf_dataset contains all the observations, states, actions, rewards, etc."""
+ if self.episodes is None:
+ path = str(self.root / "data")
+ hf_dataset = load_dataset("parquet", data_dir=path, split="train")
else:
- self.episode_data_index = calculate_episode_data_index(self.hf_dataset)
- self.hf_dataset = reset_episode_index(self.hf_dataset)
- self.stats = load_stats(repo_id, CODEBASE_VERSION, root)
- self.info = load_info(repo_id, CODEBASE_VERSION, root)
- if self.video:
- self.videos_dir = load_videos(repo_id, CODEBASE_VERSION, root)
- self.video_backend = video_backend if video_backend is not None else "pyav"
+ files = [str(self.root / self.meta.get_data_file_path(ep_idx)) for ep_idx in self.episodes]
+ hf_dataset = load_dataset("parquet", data_files=files, split="train")
+
+ # TODO(aliberts): hf_dataset.set_format("torch")
+ hf_dataset.set_transform(hf_transform_to_torch)
+
+ return hf_dataset
@property
def fps(self) -> int:
"""Frames per second used during data collection."""
- return self.info["fps"]
+ return self.meta.fps
@property
- def video(self) -> bool:
- """Returns True if this dataset loads video frames from mp4 files.
- Returns False if it only loads images from png files.
- """
- return self.info.get("video", False)
+ def num_frames(self) -> int:
+ """Number of frames in selected episodes."""
+ return len(self.hf_dataset) if self.hf_dataset is not None else self.meta.total_frames
@property
- def features(self) -> datasets.Features:
- return self.hf_dataset.features
+ def num_episodes(self) -> int:
+ """Number of episodes selected."""
+ return len(self.episodes) if self.episodes is not None else self.meta.total_episodes
@property
- def camera_keys(self) -> list[str]:
- """Keys to access image and video stream from cameras."""
- keys = []
- for key, feats in self.hf_dataset.features.items():
- if isinstance(feats, (datasets.Image, VideoFrame)):
- keys.append(key)
- return keys
+ def features(self) -> dict[str, dict]:
+ return self.meta.features
@property
- def video_frame_keys(self) -> list[str]:
- """Keys to access video frames that requires to be decoded into images.
+ def hf_features(self) -> datasets.Features:
+ """Features of the hf_dataset."""
+ if self.hf_dataset is not None:
+ return self.hf_dataset.features
+ else:
+ return get_hf_features_from_features(self.features)
- Note: It is empty if the dataset contains images only,
- or equal to `self.cameras` if the dataset contains videos only,
- or can even be a subset of `self.cameras` in a case of a mixed image/video dataset.
- """
- video_frame_keys = []
- for key, feats in self.hf_dataset.features.items():
- if isinstance(feats, VideoFrame):
- video_frame_keys.append(key)
- return video_frame_keys
+ def _get_query_indices(self, idx: int, ep_idx: int) -> tuple[dict[str, list[int | bool]]]:
+ ep_start = self.episode_data_index["from"][ep_idx]
+ ep_end = self.episode_data_index["to"][ep_idx]
+ query_indices = {
+ key: [max(ep_start.item(), min(ep_end.item() - 1, idx + delta)) for delta in delta_idx]
+ for key, delta_idx in self.delta_indices.items()
+ }
+ padding = { # Pad values outside of current episode range
+ f"{key}_is_pad": torch.BoolTensor(
+ [(idx + delta < ep_start.item()) | (idx + delta >= ep_end.item()) for delta in delta_idx]
+ )
+ for key, delta_idx in self.delta_indices.items()
+ }
+ return query_indices, padding
- @property
- def num_samples(self) -> int:
- """Number of samples/frames."""
- return len(self.hf_dataset)
+ def _get_query_timestamps(
+ self,
+ current_ts: float,
+ query_indices: dict[str, list[int]] | None = None,
+ ) -> dict[str, list[float]]:
+ query_timestamps = {}
+ for key in self.meta.video_keys:
+ if query_indices is not None and key in query_indices:
+ timestamps = self.hf_dataset.select(query_indices[key])["timestamp"]
+ query_timestamps[key] = torch.stack(timestamps).tolist()
+ else:
+ query_timestamps[key] = [current_ts]
- @property
- def num_episodes(self) -> int:
- """Number of episodes."""
- return len(self.hf_dataset.unique("episode_index"))
+ return query_timestamps
- @property
- def tolerance_s(self) -> float:
- """Tolerance in seconds used to discard loaded frames when their timestamps
- are not close enough from the requested frames. It is only used when `delta_timestamps`
- is provided or when loading video frames from mp4 files.
+ def _query_hf_dataset(self, query_indices: dict[str, list[int]]) -> dict:
+ return {
+ key: torch.stack(self.hf_dataset.select(q_idx)[key])
+ for key, q_idx in query_indices.items()
+ if key not in self.meta.video_keys
+ }
+
+ def _query_videos(self, query_timestamps: dict[str, list[float]], ep_idx: int) -> dict:
+ """Note: When using data workers (e.g. DataLoader with num_workers>0), do not call this function
+ in the main process (e.g. by using a second Dataloader with num_workers=0). It will result in a
+ Segmentation Fault. This probably happens because a memory reference to the video loader is created in
+ the main process and a subprocess fails to access it.
"""
- # 1e-4 to account for possible numerical error
- return 1 / self.fps - 1e-4
+ item = {}
+ for vid_key, query_ts in query_timestamps.items():
+ video_path = self.root / self.meta.get_video_file_path(ep_idx, vid_key)
+ frames = decode_video_frames_torchvision(
+ video_path, query_ts, self.tolerance_s, self.video_backend
+ )
+ item[vid_key] = frames.squeeze(0)
+
+ return item
+
+ def _add_padding_keys(self, item: dict, padding: dict[str, list[bool]]) -> dict:
+ for key, val in padding.items():
+ item[key] = torch.BoolTensor(val)
+ return item
def __len__(self):
- return self.num_samples
+ return self.num_frames
- def __getitem__(self, idx):
+ def __getitem__(self, idx) -> dict:
item = self.hf_dataset[idx]
+ ep_idx = item["episode_index"].item()
- if self.delta_timestamps is not None:
- item = load_previous_and_future_frames(
- item,
- self.hf_dataset,
- self.episode_data_index,
- self.delta_timestamps,
- self.tolerance_s,
- )
+ query_indices = None
+ if self.delta_indices is not None:
+ current_ep_idx = self.episodes.index(ep_idx) if self.episodes is not None else ep_idx
+ query_indices, padding = self._get_query_indices(idx, current_ep_idx)
+ query_result = self._query_hf_dataset(query_indices)
+ item = {**item, **padding}
+ for key, val in query_result.items():
+ item[key] = val
- if self.video:
- item = load_from_videos(
- item,
- self.video_frame_keys,
- self.videos_dir,
- self.tolerance_s,
- self.video_backend,
- )
+ if len(self.meta.video_keys) > 0:
+ current_ts = item["timestamp"].item()
+ query_timestamps = self._get_query_timestamps(current_ts, query_indices)
+ video_frames = self._query_videos(query_timestamps, ep_idx)
+ item = {**video_frames, **item}
if self.image_transforms is not None:
- for cam in self.camera_keys:
+ image_keys = self.meta.camera_keys
+ for cam in image_keys:
item[cam] = self.image_transforms(item[cam])
return item
def __repr__(self):
+ feature_keys = list(self.features)
return (
- f"{self.__class__.__name__}(\n"
- f" Repository ID: '{self.repo_id}',\n"
- f" Split: '{self.split}',\n"
- f" Number of Samples: {self.num_samples},\n"
- f" Number of Episodes: {self.num_episodes},\n"
- f" Type: {'video (.mp4)' if self.video else 'image (.png)'},\n"
- f" Recorded Frames per Second: {self.fps},\n"
- f" Camera Keys: {self.camera_keys},\n"
- f" Video Frame Keys: {self.video_frame_keys if self.video else 'N/A'},\n"
- f" Transformations: {self.image_transforms},\n"
- f" Codebase Version: {self.info.get('codebase_version', '< v1.6')},\n"
- f")"
+ f"{self.__class__.__name__}({{\n"
+ f" Repository ID: '{self.repo_id}',\n"
+ f" Number of selected episodes: '{self.num_episodes}',\n"
+ f" Number of selected samples: '{self.num_frames}',\n"
+ f" Features: '{feature_keys}',\n"
+ "})',\n"
+ )
+
+ def create_episode_buffer(self, episode_index: int | None = None) -> dict:
+ current_ep_idx = self.meta.total_episodes if episode_index is None else episode_index
+ return {
+ "size": 0,
+ **{key: current_ep_idx if key == "episode_index" else [] for key in self.features},
+ }
+
+ def _get_image_file_path(self, episode_index: int, image_key: str, frame_index: int) -> Path:
+ fpath = DEFAULT_IMAGE_PATH.format(
+ image_key=image_key, episode_index=episode_index, frame_index=frame_index
)
+ return self.root / fpath
+
+ def _save_image(self, image: torch.Tensor | np.ndarray | PIL.Image.Image, fpath: Path) -> None:
+ if self.image_writer is None:
+ if isinstance(image, torch.Tensor):
+ image = image.cpu().numpy()
+ write_image(image, fpath)
+ else:
+ self.image_writer.save_image(image=image, fpath=fpath)
+
+ def add_frame(self, frame: dict) -> None:
+ """
+ This function only adds the frame to the episode_buffer. Apart from images — which are written in a
+ temporary directory — nothing is written to disk. To save those frames, the 'save_episode()' method
+ then needs to be called.
+ """
+ # TODO(aliberts, rcadene): Add sanity check for the input, check it's numpy or torch,
+ # check the dtype and shape matches, etc.
+
+ if self.episode_buffer is None:
+ self.episode_buffer = self.create_episode_buffer()
+
+ frame_index = self.episode_buffer["size"]
+ timestamp = frame.pop("timestamp") if "timestamp" in frame else frame_index / self.fps
+ self.episode_buffer["frame_index"].append(frame_index)
+ self.episode_buffer["timestamp"].append(timestamp)
+
+ for key in frame:
+ if key not in self.features:
+ raise ValueError(key)
+
+ if self.features[key]["dtype"] not in ["image", "video"]:
+ item = frame[key].numpy() if isinstance(frame[key], torch.Tensor) else frame[key]
+ self.episode_buffer[key].append(item)
+ elif self.features[key]["dtype"] in ["image", "video"]:
+ img_path = self._get_image_file_path(
+ episode_index=self.episode_buffer["episode_index"], image_key=key, frame_index=frame_index
+ )
+ if frame_index == 0:
+ img_path.parent.mkdir(parents=True, exist_ok=True)
+ self._save_image(frame[key], img_path)
+ self.episode_buffer[key].append(str(img_path))
+
+ self.episode_buffer["size"] += 1
+
+ def save_episode(self, task: str, encode_videos: bool = True, episode_data: dict | None = None) -> None:
+ """
+ This will save to disk the current episode in self.episode_buffer. Note that since it affects files on
+ disk, it sets self.consolidated to False to ensure proper consolidation later on before uploading to
+ the hub.
+
+ Use 'encode_videos' if you want to encode videos during the saving of this episode. Otherwise,
+ you can do it later with dataset.consolidate(). This is to give more flexibility on when to spend
+ time for video encoding.
+ """
+ if not episode_data:
+ episode_buffer = self.episode_buffer
+
+ episode_length = episode_buffer.pop("size")
+ episode_index = episode_buffer["episode_index"]
+ if episode_index != self.meta.total_episodes:
+ # TODO(aliberts): Add option to use existing episode_index
+ raise NotImplementedError(
+ "You might have manually provided the episode_buffer with an episode_index that doesn't "
+ "match the total number of episodes in the dataset. This is not supported for now."
+ )
+
+ if episode_length == 0:
+ raise ValueError(
+ "You must add one or several frames with `add_frame` before calling `add_episode`."
+ )
+
+ task_index = self.meta.get_task_index(task)
+
+ if not set(episode_buffer.keys()) == set(self.features):
+ raise ValueError()
+
+ for key, ft in self.features.items():
+ if key == "index":
+ episode_buffer[key] = np.arange(
+ self.meta.total_frames, self.meta.total_frames + episode_length
+ )
+ elif key == "episode_index":
+ episode_buffer[key] = np.full((episode_length,), episode_index)
+ elif key == "task_index":
+ episode_buffer[key] = np.full((episode_length,), task_index)
+ elif ft["dtype"] in ["image", "video"]:
+ continue
+ elif len(ft["shape"]) == 1 and ft["shape"][0] == 1:
+ episode_buffer[key] = np.array(episode_buffer[key], dtype=ft["dtype"])
+ elif len(ft["shape"]) == 1 and ft["shape"][0] > 1:
+ episode_buffer[key] = np.stack(episode_buffer[key])
+ else:
+ raise ValueError(key)
+
+ self._wait_image_writer()
+ self._save_episode_table(episode_buffer, episode_index)
+
+ self.meta.save_episode(episode_index, episode_length, task, task_index)
+
+ if encode_videos and len(self.meta.video_keys) > 0:
+ video_paths = self.encode_episode_videos(episode_index)
+ for key in self.meta.video_keys:
+ episode_buffer[key] = video_paths[key]
+
+ if not episode_data: # Reset the buffer
+ self.episode_buffer = self.create_episode_buffer()
+
+ self.consolidated = False
+
+ def _save_episode_table(self, episode_buffer: dict, episode_index: int) -> None:
+ episode_dict = {key: episode_buffer[key] for key in self.hf_features}
+ ep_dataset = datasets.Dataset.from_dict(episode_dict, features=self.hf_features, split="train")
+ ep_data_path = self.root / self.meta.get_data_file_path(ep_index=episode_index)
+ ep_data_path.parent.mkdir(parents=True, exist_ok=True)
+ write_parquet(ep_dataset, ep_data_path)
+
+ def clear_episode_buffer(self) -> None:
+ episode_index = self.episode_buffer["episode_index"]
+ if self.image_writer is not None:
+ for cam_key in self.meta.camera_keys:
+ img_dir = self._get_image_file_path(
+ episode_index=episode_index, image_key=cam_key, frame_index=0
+ ).parent
+ if img_dir.is_dir():
+ shutil.rmtree(img_dir)
+
+ # Reset the buffer
+ self.episode_buffer = self.create_episode_buffer()
+
+ def start_image_writer(self, num_processes: int = 0, num_threads: int = 4) -> None:
+ if isinstance(self.image_writer, AsyncImageWriter):
+ logging.warning(
+ "You are starting a new AsyncImageWriter that is replacing an already existing one in the dataset."
+ )
+
+ self.image_writer = AsyncImageWriter(
+ num_processes=num_processes,
+ num_threads=num_threads,
+ )
+
+ def stop_image_writer(self) -> None:
+ """
+ Whenever wrapping this dataset inside a parallelized DataLoader, this needs to be called first to
+ remove the image_write in order for the LeRobotDataset object to be pickleable and parallelized.
+ """
+ if self.image_writer is not None:
+ self.image_writer.stop()
+ self.image_writer = None
+
+ def _wait_image_writer(self) -> None:
+ """Wait for asynchronous image writer to finish."""
+ if self.image_writer is not None:
+ self.image_writer.wait_until_done()
+
+ def encode_videos(self) -> None:
+ """
+ Use ffmpeg to convert frames stored as png into mp4 videos.
+ Note: `encode_video_frames` is a blocking call. Making it asynchronous shouldn't speedup encoding,
+ since video encoding with ffmpeg is already using multithreading.
+ """
+ for ep_idx in range(self.meta.total_episodes):
+ self.encode_episode_videos(ep_idx)
+
+ def encode_episode_videos(self, episode_index: int) -> dict:
+ """
+ Use ffmpeg to convert frames stored as png into mp4 videos.
+ Note: `encode_video_frames` is a blocking call. Making it asynchronous shouldn't speedup encoding,
+ since video encoding with ffmpeg is already using multithreading.
+ """
+ video_paths = {}
+ for key in self.meta.video_keys:
+ video_path = self.root / self.meta.get_video_file_path(episode_index, key)
+ video_paths[key] = str(video_path)
+ if video_path.is_file():
+ # Skip if video is already encoded. Could be the case when resuming data recording.
+ continue
+ img_dir = self._get_image_file_path(
+ episode_index=episode_index, image_key=key, frame_index=0
+ ).parent
+ encode_video_frames(img_dir, video_path, self.fps, overwrite=True)
+
+ return video_paths
+
+ def consolidate(self, run_compute_stats: bool = True, keep_image_files: bool = False) -> None:
+ self.hf_dataset = self.load_hf_dataset()
+ self.episode_data_index = get_episode_data_index(self.meta.episodes, self.episodes)
+ check_timestamps_sync(self.hf_dataset, self.episode_data_index, self.fps, self.tolerance_s)
+
+ if len(self.meta.video_keys) > 0:
+ self.encode_videos()
+ self.meta.write_video_info()
+
+ if not keep_image_files:
+ img_dir = self.root / "images"
+ if img_dir.is_dir():
+ shutil.rmtree(self.root / "images")
+
+ video_files = list(self.root.rglob("*.mp4"))
+ assert len(video_files) == self.num_episodes * len(self.meta.video_keys)
+
+ parquet_files = list(self.root.rglob("*.parquet"))
+ assert len(parquet_files) == self.num_episodes
+
+ if run_compute_stats:
+ self.stop_image_writer()
+ # TODO(aliberts): refactor stats in save_episodes
+ self.meta.stats = compute_stats(self)
+ serialized_stats = serialize_dict(self.meta.stats)
+ write_json(serialized_stats, self.root / STATS_PATH)
+ self.consolidated = True
+ else:
+ logging.warning(
+ "Skipping computation of the dataset statistics, dataset is not fully consolidated."
+ )
@classmethod
- def from_preloaded(
+ def create(
cls,
- repo_id: str = "from_preloaded",
- root: Path | None = None,
- split: str = "train",
- transform: callable = None,
- delta_timestamps: dict[list[float]] | None = None,
- # additional preloaded attributes
- hf_dataset=None,
- episode_data_index=None,
- stats=None,
- info=None,
- videos_dir=None,
- video_backend=None,
+ repo_id: str,
+ fps: int,
+ root: str | Path | None = None,
+ robot: Robot | None = None,
+ robot_type: str | None = None,
+ features: dict | None = None,
+ use_videos: bool = True,
+ tolerance_s: float = 1e-4,
+ image_writer_processes: int = 0,
+ image_writer_threads: int = 0,
+ video_backend: str | None = None,
) -> "LeRobotDataset":
- """Create a LeRobot Dataset from existing data and attributes instead of loading from the filesystem.
+ """Create a LeRobot Dataset from scratch in order to record data."""
+ obj = cls.__new__(cls)
+ obj.meta = LeRobotDatasetMetadata.create(
+ repo_id=repo_id,
+ fps=fps,
+ root=root,
+ robot=robot,
+ robot_type=robot_type,
+ features=features,
+ use_videos=use_videos,
+ )
+ obj.repo_id = obj.meta.repo_id
+ obj.root = obj.meta.root
+ obj.local_files_only = obj.meta.local_files_only
+ obj.tolerance_s = tolerance_s
+ obj.image_writer = None
- It is especially useful when converting raw data into LeRobotDataset before saving the dataset
- on the filesystem or uploading to the hub.
+ if image_writer_processes or image_writer_threads:
+ obj.start_image_writer(image_writer_processes, image_writer_threads)
- Note: Meta-data attributes like `repo_id`, `version`, `root`, etc are optional and potentially
- meaningless depending on the downstream usage of the return dataset.
- """
- # create an empty object of type LeRobotDataset
- obj = cls.__new__(cls)
- obj.repo_id = repo_id
- obj.root = root
- obj.split = split
- obj.image_transforms = transform
- obj.delta_timestamps = delta_timestamps
- obj.hf_dataset = hf_dataset
- obj.episode_data_index = episode_data_index
- obj.stats = stats
- obj.info = info if info is not None else {}
- obj.videos_dir = videos_dir
+ # TODO(aliberts, rcadene, alexander-soare): Merge this with OnlineBuffer/DataBuffer
+ obj.episode_buffer = obj.create_episode_buffer()
+
+ # This bool indicates that the current LeRobotDataset instance is in sync with the files on disk. It
+ # is used to know when certain operations are need (for instance, computing dataset statistics). In
+ # order to be able to push the dataset to the hub, it needs to be consolidated first by calling
+ # self.consolidate().
+ obj.consolidated = True
+
+ obj.episodes = None
+ obj.hf_dataset = None
+ obj.image_transforms = None
+ obj.delta_timestamps = None
+ obj.delta_indices = None
+ obj.episode_data_index = None
obj.video_backend = video_backend if video_backend is not None else "pyav"
return obj
@@ -225,57 +969,56 @@ class MultiLeRobotDataset(torch.utils.data.Dataset):
def __init__(
self,
repo_ids: list[str],
- root: Path | None = DATA_DIR,
- split: str = "train",
+ root: str | Path | None = None,
+ episodes: dict | None = None,
image_transforms: Callable | None = None,
delta_timestamps: dict[list[float]] | None = None,
+ tolerances_s: dict | None = None,
+ download_videos: bool = True,
+ local_files_only: bool = False,
video_backend: str | None = None,
):
super().__init__()
self.repo_ids = repo_ids
+ self.root = Path(root) if root else LEROBOT_HOME
+ self.tolerances_s = tolerances_s if tolerances_s else {repo_id: 1e-4 for repo_id in repo_ids}
# Construct the underlying datasets passing everything but `transform` and `delta_timestamps` which
# are handled by this class.
self._datasets = [
LeRobotDataset(
repo_id,
- root=root,
- split=split,
- delta_timestamps=delta_timestamps,
+ root=self.root / repo_id,
+ episodes=episodes[repo_id] if episodes else None,
image_transforms=image_transforms,
+ delta_timestamps=delta_timestamps,
+ tolerance_s=self.tolerances_s[repo_id],
+ download_videos=download_videos,
+ local_files_only=local_files_only,
video_backend=video_backend,
)
for repo_id in repo_ids
]
- # Check that some properties are consistent across datasets. Note: We may relax some of these
- # consistency requirements in future iterations of this class.
- for repo_id, dataset in zip(self.repo_ids, self._datasets, strict=True):
- if dataset.info != self._datasets[0].info:
- raise ValueError(
- f"Detected a mismatch in dataset info between {self.repo_ids[0]} and {repo_id}. This is "
- "not yet supported."
- )
+
# Disable any data keys that are not common across all of the datasets. Note: we may relax this
# restriction in future iterations of this class. For now, this is necessary at least for being able
# to use PyTorch's default DataLoader collate function.
- self.disabled_data_keys = set()
- intersection_data_keys = set(self._datasets[0].hf_dataset.features)
- for dataset in self._datasets:
- intersection_data_keys.intersection_update(dataset.hf_dataset.features)
- if len(intersection_data_keys) == 0:
+ self.disabled_features = set()
+ intersection_features = set(self._datasets[0].features)
+ for ds in self._datasets:
+ intersection_features.intersection_update(ds.features)
+ if len(intersection_features) == 0:
raise RuntimeError(
- "Multiple datasets were provided but they had no keys common to all of them. The "
- "multi-dataset functionality currently only keeps common keys."
+ "Multiple datasets were provided but they had no keys common to all of them. "
+ "The multi-dataset functionality currently only keeps common keys."
)
- for repo_id, dataset in zip(self.repo_ids, self._datasets, strict=True):
- extra_keys = set(dataset.hf_dataset.features).difference(intersection_data_keys)
+ for repo_id, ds in zip(self.repo_ids, self._datasets, strict=True):
+ extra_keys = set(ds.features).difference(intersection_features)
logging.warning(
f"keys {extra_keys} of {repo_id} were disabled as they are not contained in all the "
"other datasets."
)
- self.disabled_data_keys.update(extra_keys)
+ self.disabled_features.update(extra_keys)
- self.root = root
- self.split = split
self.image_transforms = image_transforms
self.delta_timestamps = delta_timestamps
self.stats = aggregate_stats(self._datasets)
@@ -299,7 +1042,7 @@ def fps(self) -> int:
NOTE: Fow now, this relies on a check in __init__ to make sure all sub-datasets have the same info.
"""
- return self._datasets[0].info["fps"]
+ return self._datasets[0].meta.info["fps"]
@property
def video(self) -> bool:
@@ -309,13 +1052,13 @@ def video(self) -> bool:
NOTE: Fow now, this relies on a check in __init__ to make sure all sub-datasets have the same info.
"""
- return self._datasets[0].info.get("video", False)
+ return self._datasets[0].meta.info.get("video", False)
@property
def features(self) -> datasets.Features:
features = {}
for dataset in self._datasets:
- features.update({k: v for k, v in dataset.features.items() if k not in self.disabled_data_keys})
+ features.update({k: v for k, v in dataset.hf_features.items() if k not in self.disabled_features})
return features
@property
@@ -342,9 +1085,9 @@ def video_frame_keys(self) -> list[str]:
return video_frame_keys
@property
- def num_samples(self) -> int:
+ def num_frames(self) -> int:
"""Number of samples/frames."""
- return sum(d.num_samples for d in self._datasets)
+ return sum(d.num_frames for d in self._datasets)
@property
def num_episodes(self) -> int:
@@ -361,7 +1104,7 @@ def tolerance_s(self) -> float:
return 1 / self.fps - 1e-4
def __len__(self):
- return self.num_samples
+ return self.num_frames
def __getitem__(self, idx: int) -> dict[str, torch.Tensor]:
if idx >= len(self):
@@ -370,8 +1113,8 @@ def __getitem__(self, idx: int) -> dict[str, torch.Tensor]:
start_idx = 0
dataset_idx = 0
for dataset in self._datasets:
- if idx >= start_idx + dataset.num_samples:
- start_idx += dataset.num_samples
+ if idx >= start_idx + dataset.num_frames:
+ start_idx += dataset.num_frames
dataset_idx += 1
continue
break
@@ -379,7 +1122,7 @@ def __getitem__(self, idx: int) -> dict[str, torch.Tensor]:
raise AssertionError("We expect the loop to break out as long as the index is within bounds.")
item = self._datasets[dataset_idx][idx - start_idx]
item["dataset_index"] = torch.tensor(dataset_idx)
- for data_key in self.disabled_data_keys:
+ for data_key in self.disabled_features:
if data_key in item:
del item[data_key]
@@ -389,8 +1132,7 @@ def __repr__(self):
return (
f"{self.__class__.__name__}(\n"
f" Repository IDs: '{self.repo_ids}',\n"
- f" Split: '{self.split}',\n"
- f" Number of Samples: {self.num_samples},\n"
+ f" Number of Samples: {self.num_frames},\n"
f" Number of Episodes: {self.num_episodes},\n"
f" Type: {'video (.mp4)' if self.video else 'image (.png)'},\n"
f" Recorded Frames per Second: {self.fps},\n"
diff --git a/lerobot/common/datasets/online_buffer.py b/lerobot/common/datasets/online_buffer.py
index 6b093cda7..d907e4687 100644
--- a/lerobot/common/datasets/online_buffer.py
+++ b/lerobot/common/datasets/online_buffer.py
@@ -187,7 +187,7 @@ def add_data(self, data: dict[str, np.ndarray]):
assert data[OnlineBuffer.INDEX_KEY][0].item() == 0
# Shift the incoming indices if necessary.
- if self.num_samples > 0:
+ if self.num_frames > 0:
last_episode_index = self._data[OnlineBuffer.EPISODE_INDEX_KEY][next_index - 1]
last_data_index = self._data[OnlineBuffer.INDEX_KEY][next_index - 1]
data[OnlineBuffer.EPISODE_INDEX_KEY] += last_episode_index + 1
@@ -227,11 +227,11 @@ def num_episodes(self) -> int:
)
@property
- def num_samples(self) -> int:
+ def num_frames(self) -> int:
return np.count_nonzero(self._data[OnlineBuffer.OCCUPANCY_MASK_KEY])
def __len__(self):
- return self.num_samples
+ return self.num_frames
def _item_to_tensors(self, item: dict) -> dict:
item_ = {}
diff --git a/lerobot/common/datasets/push_dataset_to_hub/aloha_hdf5_format.py b/lerobot/common/datasets/push_dataset_to_hub/aloha_hdf5_format.py
index 52c4bba3d..e2973ef81 100644
--- a/lerobot/common/datasets/push_dataset_to_hub/aloha_hdf5_format.py
+++ b/lerobot/common/datasets/push_dataset_to_hub/aloha_hdf5_format.py
@@ -30,12 +30,12 @@
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION
from lerobot.common.datasets.push_dataset_to_hub.utils import (
+ calculate_episode_data_index,
concatenate_episodes,
get_default_encoding,
save_images_concurrently,
)
from lerobot.common.datasets.utils import (
- calculate_episode_data_index,
hf_transform_to_torch,
)
from lerobot.common.datasets.video_utils import VideoFrame, encode_video_frames
diff --git a/lerobot/common/datasets/push_dataset_to_hub/cam_png_format.py b/lerobot/common/datasets/push_dataset_to_hub/cam_png_format.py
index be20c92cd..264925766 100644
--- a/lerobot/common/datasets/push_dataset_to_hub/cam_png_format.py
+++ b/lerobot/common/datasets/push_dataset_to_hub/cam_png_format.py
@@ -24,8 +24,11 @@
from PIL import Image as PILImage
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION
-from lerobot.common.datasets.push_dataset_to_hub.utils import concatenate_episodes
-from lerobot.common.datasets.utils import calculate_episode_data_index, hf_transform_to_torch
+from lerobot.common.datasets.push_dataset_to_hub.utils import (
+ calculate_episode_data_index,
+ concatenate_episodes,
+)
+from lerobot.common.datasets.utils import hf_transform_to_torch
from lerobot.common.datasets.video_utils import VideoFrame
diff --git a/lerobot/common/datasets/push_dataset_to_hub/dora_parquet_format.py b/lerobot/common/datasets/push_dataset_to_hub/dora_parquet_format.py
index 72be130e3..95f9c0071 100644
--- a/lerobot/common/datasets/push_dataset_to_hub/dora_parquet_format.py
+++ b/lerobot/common/datasets/push_dataset_to_hub/dora_parquet_format.py
@@ -26,8 +26,8 @@
from datasets import Dataset, Features, Image, Sequence, Value
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION
+from lerobot.common.datasets.push_dataset_to_hub.utils import calculate_episode_data_index
from lerobot.common.datasets.utils import (
- calculate_episode_data_index,
hf_transform_to_torch,
)
from lerobot.common.datasets.video_utils import VideoFrame
diff --git a/lerobot/common/datasets/push_dataset_to_hub/openx_rlds_format.py b/lerobot/common/datasets/push_dataset_to_hub/openx_rlds_format.py
index f5744c521..cfe115034 100644
--- a/lerobot/common/datasets/push_dataset_to_hub/openx_rlds_format.py
+++ b/lerobot/common/datasets/push_dataset_to_hub/openx_rlds_format.py
@@ -42,12 +42,12 @@
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION
from lerobot.common.datasets.push_dataset_to_hub.openx.transforms import OPENX_STANDARDIZATION_TRANSFORMS
from lerobot.common.datasets.push_dataset_to_hub.utils import (
+ calculate_episode_data_index,
concatenate_episodes,
get_default_encoding,
save_images_concurrently,
)
from lerobot.common.datasets.utils import (
- calculate_episode_data_index,
hf_transform_to_torch,
)
from lerobot.common.datasets.video_utils import VideoFrame, encode_video_frames
diff --git a/lerobot/common/datasets/push_dataset_to_hub/pusht_zarr_format.py b/lerobot/common/datasets/push_dataset_to_hub/pusht_zarr_format.py
index 13d6c837e..27b31ba24 100644
--- a/lerobot/common/datasets/push_dataset_to_hub/pusht_zarr_format.py
+++ b/lerobot/common/datasets/push_dataset_to_hub/pusht_zarr_format.py
@@ -27,12 +27,12 @@
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION
from lerobot.common.datasets.push_dataset_to_hub.utils import (
+ calculate_episode_data_index,
concatenate_episodes,
get_default_encoding,
save_images_concurrently,
)
from lerobot.common.datasets.utils import (
- calculate_episode_data_index,
hf_transform_to_torch,
)
from lerobot.common.datasets.video_utils import VideoFrame, encode_video_frames
diff --git a/lerobot/common/datasets/push_dataset_to_hub/umi_zarr_format.py b/lerobot/common/datasets/push_dataset_to_hub/umi_zarr_format.py
index d724cf33e..fec893a7f 100644
--- a/lerobot/common/datasets/push_dataset_to_hub/umi_zarr_format.py
+++ b/lerobot/common/datasets/push_dataset_to_hub/umi_zarr_format.py
@@ -28,12 +28,12 @@
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION
from lerobot.common.datasets.push_dataset_to_hub._umi_imagecodecs_numcodecs import register_codecs
from lerobot.common.datasets.push_dataset_to_hub.utils import (
+ calculate_episode_data_index,
concatenate_episodes,
get_default_encoding,
save_images_concurrently,
)
from lerobot.common.datasets.utils import (
- calculate_episode_data_index,
hf_transform_to_torch,
)
from lerobot.common.datasets.video_utils import VideoFrame, encode_video_frames
diff --git a/lerobot/common/datasets/push_dataset_to_hub/utils.py b/lerobot/common/datasets/push_dataset_to_hub/utils.py
index 97b54e45b..ebcf87f77 100644
--- a/lerobot/common/datasets/push_dataset_to_hub/utils.py
+++ b/lerobot/common/datasets/push_dataset_to_hub/utils.py
@@ -16,7 +16,9 @@
import inspect
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path
+from typing import Dict
+import datasets
import numpy
import PIL
import torch
@@ -72,3 +74,58 @@ def check_repo_id(repo_id: str) -> None:
f"""`repo_id` is expected to contain a community or user id `/` the name of the dataset
(e.g. 'lerobot/pusht'), but contains '{repo_id}'."""
)
+
+
+# TODO(aliberts): remove
+def calculate_episode_data_index(hf_dataset: datasets.Dataset) -> Dict[str, torch.Tensor]:
+ """
+ Calculate episode data index for the provided HuggingFace Dataset. Relies on episode_index column of hf_dataset.
+
+ Parameters:
+ - hf_dataset (datasets.Dataset): A HuggingFace dataset containing the episode index.
+
+ Returns:
+ - episode_data_index: A dictionary containing the data index for each episode. The dictionary has two keys:
+ - "from": A tensor containing the starting index of each episode.
+ - "to": A tensor containing the ending index of each episode.
+ """
+ episode_data_index = {"from": [], "to": []}
+
+ current_episode = None
+ """
+ The episode_index is a list of integers, each representing the episode index of the corresponding example.
+ For instance, the following is a valid episode_index:
+ [0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 2]
+
+ Below, we iterate through the episode_index and populate the episode_data_index dictionary with the starting and
+ ending index of each episode. For the episode_index above, the episode_data_index dictionary will look like this:
+ {
+ "from": [0, 3, 7],
+ "to": [3, 7, 12]
+ }
+ """
+ if len(hf_dataset) == 0:
+ episode_data_index = {
+ "from": torch.tensor([]),
+ "to": torch.tensor([]),
+ }
+ return episode_data_index
+ for idx, episode_idx in enumerate(hf_dataset["episode_index"]):
+ if episode_idx != current_episode:
+ # We encountered a new episode, so we append its starting location to the "from" list
+ episode_data_index["from"].append(idx)
+ # If this is not the first episode, we append the ending location of the previous episode to the "to" list
+ if current_episode is not None:
+ episode_data_index["to"].append(idx)
+ # Let's keep track of the current episode index
+ current_episode = episode_idx
+ else:
+ # We are still in the same episode, so there is nothing for us to do here
+ pass
+ # We have reached the end of the dataset, so we append the ending location of the last episode to the "to" list
+ episode_data_index["to"].append(idx + 1)
+
+ for k in ["from", "to"]:
+ episode_data_index[k] = torch.tensor(episode_data_index[k])
+
+ return episode_data_index
diff --git a/lerobot/common/datasets/push_dataset_to_hub/xarm_pkl_format.py b/lerobot/common/datasets/push_dataset_to_hub/xarm_pkl_format.py
index ad1cb560e..0047e48c3 100644
--- a/lerobot/common/datasets/push_dataset_to_hub/xarm_pkl_format.py
+++ b/lerobot/common/datasets/push_dataset_to_hub/xarm_pkl_format.py
@@ -27,12 +27,12 @@
from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION
from lerobot.common.datasets.push_dataset_to_hub.utils import (
+ calculate_episode_data_index,
concatenate_episodes,
get_default_encoding,
save_images_concurrently,
)
from lerobot.common.datasets.utils import (
- calculate_episode_data_index,
hf_transform_to_torch,
)
from lerobot.common.datasets.video_utils import VideoFrame, encode_video_frames
diff --git a/lerobot/common/datasets/utils.py b/lerobot/common/datasets/utils.py
index d6aef15f5..5f088b118 100644
--- a/lerobot/common/datasets/utils.py
+++ b/lerobot/common/datasets/utils.py
@@ -14,30 +14,56 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import json
-import re
-import warnings
-from functools import cache
+import logging
+import textwrap
+from itertools import accumulate
from pathlib import Path
-from typing import Dict
+from pprint import pformat
+from typing import Any
import datasets
+import jsonlines
+import numpy as np
+import pyarrow.compute as pc
import torch
-from datasets import load_dataset, load_from_disk
-from huggingface_hub import DatasetCard, HfApi, hf_hub_download, snapshot_download
+from datasets.table import embed_table_storage
+from huggingface_hub import DatasetCard, DatasetCardData, HfApi
from PIL import Image as PILImage
-from safetensors.torch import load_file
from torchvision import transforms
+from lerobot.common.robot_devices.robots.utils import Robot
+
+DEFAULT_CHUNK_SIZE = 1000 # Max number of episodes per chunk
+
+INFO_PATH = "meta/info.json"
+EPISODES_PATH = "meta/episodes.jsonl"
+STATS_PATH = "meta/stats.json"
+TASKS_PATH = "meta/tasks.jsonl"
+
+DEFAULT_VIDEO_PATH = "videos/chunk-{episode_chunk:03d}/{video_key}/episode_{episode_index:06d}.mp4"
+DEFAULT_PARQUET_PATH = "data/chunk-{episode_chunk:03d}/episode_{episode_index:06d}.parquet"
+DEFAULT_IMAGE_PATH = "images/{image_key}/episode_{episode_index:06d}/frame_{frame_index:06d}.png"
+
DATASET_CARD_TEMPLATE = """
---
# Metadata will go there
---
This dataset was created using [LeRobot](https://github.com/huggingface/lerobot).
+## {}
+
"""
+DEFAULT_FEATURES = {
+ "timestamp": {"dtype": "float32", "shape": (1,), "names": None},
+ "frame_index": {"dtype": "int64", "shape": (1,), "names": None},
+ "episode_index": {"dtype": "int64", "shape": (1,), "names": None},
+ "index": {"dtype": "int64", "shape": (1,), "names": None},
+ "task_index": {"dtype": "int64", "shape": (1,), "names": None},
+}
+
-def flatten_dict(d, parent_key="", sep="/"):
+def flatten_dict(d: dict, parent_key: str = "", sep: str = "/") -> dict:
"""Flatten a nested dictionary structure by collapsing nested keys into one key with a separator.
For example:
@@ -56,7 +82,7 @@ def flatten_dict(d, parent_key="", sep="/"):
return dict(items)
-def unflatten_dict(d, sep="/"):
+def unflatten_dict(d: dict, sep: str = "/") -> dict:
outdict = {}
for key, value in d.items():
parts = key.split(sep)
@@ -69,6 +95,82 @@ def unflatten_dict(d, sep="/"):
return outdict
+def serialize_dict(stats: dict[str, torch.Tensor | np.ndarray | dict]) -> dict:
+ serialized_dict = {key: value.tolist() for key, value in flatten_dict(stats).items()}
+ return unflatten_dict(serialized_dict)
+
+
+def write_parquet(dataset: datasets.Dataset, fpath: Path) -> None:
+ # Embed image bytes into the table before saving to parquet
+ format = dataset.format
+ dataset = dataset.with_format("arrow")
+ dataset = dataset.map(embed_table_storage, batched=False)
+ dataset = dataset.with_format(**format)
+ dataset.to_parquet(fpath)
+
+
+def load_json(fpath: Path) -> Any:
+ with open(fpath) as f:
+ return json.load(f)
+
+
+def write_json(data: dict, fpath: Path) -> None:
+ fpath.parent.mkdir(exist_ok=True, parents=True)
+ with open(fpath, "w") as f:
+ json.dump(data, f, indent=4, ensure_ascii=False)
+
+
+def load_jsonlines(fpath: Path) -> list[Any]:
+ with jsonlines.open(fpath, "r") as reader:
+ return list(reader)
+
+
+def write_jsonlines(data: dict, fpath: Path) -> None:
+ fpath.parent.mkdir(exist_ok=True, parents=True)
+ with jsonlines.open(fpath, "w") as writer:
+ writer.write_all(data)
+
+
+def append_jsonlines(data: dict, fpath: Path) -> None:
+ fpath.parent.mkdir(exist_ok=True, parents=True)
+ with jsonlines.open(fpath, "a") as writer:
+ writer.write(data)
+
+
+def load_info(local_dir: Path) -> dict:
+ info = load_json(local_dir / INFO_PATH)
+ for ft in info["features"].values():
+ ft["shape"] = tuple(ft["shape"])
+ return info
+
+
+def load_stats(local_dir: Path) -> dict:
+ if not (local_dir / STATS_PATH).exists():
+ return None
+ stats = load_json(local_dir / STATS_PATH)
+ stats = {key: torch.tensor(value) for key, value in flatten_dict(stats).items()}
+ return unflatten_dict(stats)
+
+
+def load_tasks(local_dir: Path) -> dict:
+ tasks = load_jsonlines(local_dir / TASKS_PATH)
+ return {item["task_index"]: item["task"] for item in sorted(tasks, key=lambda x: x["task_index"])}
+
+
+def load_episodes(local_dir: Path) -> dict:
+ return load_jsonlines(local_dir / EPISODES_PATH)
+
+
+def load_image_as_numpy(fpath: str | Path, dtype="float32", channel_first: bool = True) -> np.ndarray:
+ img = PILImage.open(fpath).convert("RGB")
+ img_array = np.array(img, dtype=dtype)
+ if channel_first: # (H, W, C) -> (C, H, W)
+ img_array = np.transpose(img_array, (2, 0, 1))
+ if "float" in dtype:
+ img_array /= 255.0
+ return img_array
+
+
def hf_transform_to_torch(items_dict: dict[torch.Tensor | None]):
"""Get a transform function that convert items from Hugging Face dataset (pyarrow)
to torch tensors. Importantly, images are converted from PIL, which corresponds to
@@ -80,14 +182,6 @@ def hf_transform_to_torch(items_dict: dict[torch.Tensor | None]):
if isinstance(first_item, PILImage.Image):
to_tensor = transforms.ToTensor()
items_dict[key] = [to_tensor(img) for img in items_dict[key]]
- elif isinstance(first_item, str):
- # TODO (michel-aractingi): add str2embedding via language tokenizer
- # For now we leave this part up to the user to choose how to address
- # language conditioned tasks
- pass
- elif isinstance(first_item, dict) and "path" in first_item and "timestamp" in first_item:
- # video frame will be processed downstream
- pass
elif first_item is None:
pass
else:
@@ -95,19 +189,67 @@ def hf_transform_to_torch(items_dict: dict[torch.Tensor | None]):
return items_dict
-@cache
-def get_hf_dataset_safe_version(repo_id: str, version: str) -> str:
+def _get_major_minor(version: str) -> tuple[int]:
+ split = version.strip("v").split(".")
+ return int(split[0]), int(split[1])
+
+
+class BackwardCompatibilityError(Exception):
+ def __init__(self, repo_id, version):
+ message = textwrap.dedent(f"""
+ BackwardCompatibilityError: The dataset you requested ({repo_id}) is in {version} format.
+
+ We introduced a new format since v2.0 which is not backward compatible with v1.x.
+ Please, use our conversion script. Modify the following command with your own task description:
+ ```
+ python lerobot/common/datasets/v2/convert_dataset_v1_to_v2.py \\
+ --repo-id {repo_id} \\
+ --single-task "TASK DESCRIPTION." # <---- /!\\ Replace TASK DESCRIPTION /!\\
+ ```
+
+ A few examples to replace TASK DESCRIPTION: "Pick up the blue cube and place it into the bin.",
+ "Insert the peg into the socket.", "Slide open the ziploc bag.", "Take the elevator to the 1st floor.",
+ "Open the top cabinet, store the pot inside it then close the cabinet.", "Push the T-shaped block onto the T-shaped target.",
+ "Grab the spray paint on the shelf and place it in the bin on top of the robot dog.", "Fold the sweatshirt.", ...
+
+ If you encounter a problem, contact LeRobot maintainers on [Discord](https://discord.com/invite/s3KuuzsPFb)
+ or open an [issue on GitHub](https://github.com/huggingface/lerobot/issues/new/choose).
+ """)
+ super().__init__(message)
+
+
+def check_version_compatibility(
+ repo_id: str, version_to_check: str, current_version: str, enforce_breaking_major: bool = True
+) -> None:
+ current_major, _ = _get_major_minor(current_version)
+ major_to_check, _ = _get_major_minor(version_to_check)
+ if major_to_check < current_major and enforce_breaking_major:
+ raise BackwardCompatibilityError(repo_id, version_to_check)
+ elif float(version_to_check.strip("v")) < float(current_version.strip("v")):
+ logging.warning(
+ f"""The dataset you requested ({repo_id}) was created with a previous version ({version_to_check}) of the
+ codebase. The current codebase version is {current_version}. You should be fine since
+ backward compatibility is maintained. If you encounter a problem, contact LeRobot maintainers on
+ Discord ('https://discord.com/invite/s3KuuzsPFb') or open an issue on github.""",
+ )
+
+
+def get_hub_safe_version(repo_id: str, version: str) -> str:
api = HfApi()
dataset_info = api.list_repo_refs(repo_id, repo_type="dataset")
branches = [b.name for b in dataset_info.branches]
if version not in branches:
- warnings.warn(
+ num_version = float(version.strip("v"))
+ hub_num_versions = [float(v.strip("v")) for v in branches if v.startswith("v")]
+ if num_version >= 2.0 and all(v < 2.0 for v in hub_num_versions):
+ raise BackwardCompatibilityError(repo_id, version)
+
+ logging.warning(
f"""You are trying to load a dataset from {repo_id} created with a previous version of the
codebase. The following versions are available: {branches}.
The requested version ('{version}') is not found. You should be fine since
backward compatibility is maintained. If you encounter a problem, contact LeRobot maintainers on
Discord ('https://discord.com/invite/s3KuuzsPFb') or open an issue on github.""",
- stacklevel=1,
)
if "main" not in branches:
raise ValueError(f"Version 'main' not found on {repo_id}")
@@ -116,275 +258,184 @@ def get_hf_dataset_safe_version(repo_id: str, version: str) -> str:
return version
-def load_hf_dataset(repo_id: str, version: str, root: Path, split: str) -> datasets.Dataset:
- """hf_dataset contains all the observations, states, actions, rewards, etc."""
- if root is not None:
- hf_dataset = load_from_disk(str(Path(root) / repo_id / "train"))
- # TODO(rcadene): clean this which enables getting a subset of dataset
- if split != "train":
- if "%" in split:
- raise NotImplementedError(f"We dont support splitting based on percentage for now ({split}).")
- match_from = re.search(r"train\[(\d+):\]", split)
- match_to = re.search(r"train\[:(\d+)\]", split)
- if match_from:
- from_frame_index = int(match_from.group(1))
- hf_dataset = hf_dataset.select(range(from_frame_index, len(hf_dataset)))
- elif match_to:
- to_frame_index = int(match_to.group(1))
- hf_dataset = hf_dataset.select(range(to_frame_index))
- else:
- raise ValueError(
- f'`split` ({split}) should either be "train", "train[INT:]", or "train[:INT]"'
- )
- else:
- safe_version = get_hf_dataset_safe_version(repo_id, version)
- hf_dataset = load_dataset(repo_id, revision=safe_version, split=split)
-
- hf_dataset.set_transform(hf_transform_to_torch)
- return hf_dataset
-
-
-def load_episode_data_index(repo_id, version, root) -> dict[str, torch.Tensor]:
- """episode_data_index contains the range of indices for each episode
-
- Example:
- ```python
- from_id = episode_data_index["from"][episode_id].item()
- to_id = episode_data_index["to"][episode_id].item()
- episode_frames = [dataset[i] for i in range(from_id, to_id)]
- ```
- """
- if root is not None:
- path = Path(root) / repo_id / "meta_data" / "episode_data_index.safetensors"
- else:
- safe_version = get_hf_dataset_safe_version(repo_id, version)
- path = hf_hub_download(
- repo_id, "meta_data/episode_data_index.safetensors", repo_type="dataset", revision=safe_version
- )
-
- return load_file(path)
-
-
-def load_stats(repo_id, version, root) -> dict[str, dict[str, torch.Tensor]]:
- """stats contains the statistics per modality computed over the full dataset, such as max, min, mean, std
-
- Example:
- ```python
- normalized_action = (action - stats["action"]["mean"]) / stats["action"]["std"]
- ```
- """
- if root is not None:
- path = Path(root) / repo_id / "meta_data" / "stats.safetensors"
- else:
- safe_version = get_hf_dataset_safe_version(repo_id, version)
- path = hf_hub_download(
- repo_id, "meta_data/stats.safetensors", repo_type="dataset", revision=safe_version
- )
+def get_hf_features_from_features(features: dict) -> datasets.Features:
+ hf_features = {}
+ for key, ft in features.items():
+ if ft["dtype"] == "video":
+ continue
+ elif ft["dtype"] == "image":
+ hf_features[key] = datasets.Image()
+ elif ft["shape"] == (1,):
+ hf_features[key] = datasets.Value(dtype=ft["dtype"])
+ else:
+ assert len(ft["shape"]) == 1
+ hf_features[key] = datasets.Sequence(
+ length=ft["shape"][0], feature=datasets.Value(dtype=ft["dtype"])
+ )
- stats = load_file(path)
- return unflatten_dict(stats)
+ return datasets.Features(hf_features)
-def load_info(repo_id, version, root) -> dict:
- """info contains useful information regarding the dataset that are not stored elsewhere
+def get_features_from_robot(robot: Robot, use_videos: bool = True) -> dict:
+ camera_ft = {}
+ if robot.cameras:
+ camera_ft = {
+ key: {"dtype": "video" if use_videos else "image", **ft}
+ for key, ft in robot.camera_features.items()
+ }
+ return {**robot.motor_features, **camera_ft, **DEFAULT_FEATURES}
+
+
+def create_empty_dataset_info(
+ codebase_version: str,
+ fps: int,
+ robot_type: str,
+ features: dict,
+ use_videos: bool,
+) -> dict:
+ return {
+ "codebase_version": codebase_version,
+ "robot_type": robot_type,
+ "total_episodes": 0,
+ "total_frames": 0,
+ "total_tasks": 0,
+ "total_videos": 0,
+ "total_chunks": 0,
+ "chunks_size": DEFAULT_CHUNK_SIZE,
+ "fps": fps,
+ "splits": {},
+ "data_path": DEFAULT_PARQUET_PATH,
+ "video_path": DEFAULT_VIDEO_PATH if use_videos else None,
+ "features": features,
+ }
- Example:
- ```python
- print("frame per second used to collect the video", info["fps"])
- ```
- """
- if root is not None:
- path = Path(root) / repo_id / "meta_data" / "info.json"
- else:
- safe_version = get_hf_dataset_safe_version(repo_id, version)
- path = hf_hub_download(repo_id, "meta_data/info.json", repo_type="dataset", revision=safe_version)
- with open(path) as f:
- info = json.load(f)
- return info
+def get_episode_data_index(
+ episode_dicts: list[dict], episodes: list[int] | None = None
+) -> dict[str, torch.Tensor]:
+ episode_lengths = {ep_idx: ep_dict["length"] for ep_idx, ep_dict in enumerate(episode_dicts)}
+ if episodes is not None:
+ episode_lengths = {ep_idx: episode_lengths[ep_idx] for ep_idx in episodes}
+ cumulative_lenghts = list(accumulate(episode_lengths.values()))
+ return {
+ "from": torch.LongTensor([0] + cumulative_lenghts[:-1]),
+ "to": torch.LongTensor(cumulative_lenghts),
+ }
-def load_videos(repo_id, version, root) -> Path:
- if root is not None:
- path = Path(root) / repo_id / "videos"
- else:
- # TODO(rcadene): we download the whole repo here. see if we can avoid this
- safe_version = get_hf_dataset_safe_version(repo_id, version)
- repo_dir = snapshot_download(repo_id, repo_type="dataset", revision=safe_version)
- path = Path(repo_dir) / "videos"
- return path
+def calculate_total_episode(
+ hf_dataset: datasets.Dataset, raise_if_not_contiguous: bool = True
+) -> dict[str, torch.Tensor]:
+ episode_indices = sorted(hf_dataset.unique("episode_index"))
+ total_episodes = len(episode_indices)
+ if raise_if_not_contiguous and episode_indices != list(range(total_episodes)):
+ raise ValueError("episode_index values are not sorted and contiguous.")
+ return total_episodes
+
+
+def calculate_episode_data_index(hf_dataset: datasets.Dataset) -> dict[str, torch.Tensor]:
+ episode_lengths = []
+ table = hf_dataset.data.table
+ total_episodes = calculate_total_episode(hf_dataset)
+ for ep_idx in range(total_episodes):
+ ep_table = table.filter(pc.equal(table["episode_index"], ep_idx))
+ episode_lengths.insert(ep_idx, len(ep_table))
+
+ cumulative_lenghts = list(accumulate(episode_lengths))
+ return {
+ "from": torch.LongTensor([0] + cumulative_lenghts[:-1]),
+ "to": torch.LongTensor(cumulative_lenghts),
+ }
-def load_previous_and_future_frames(
- item: dict[str, torch.Tensor],
+def check_timestamps_sync(
hf_dataset: datasets.Dataset,
episode_data_index: dict[str, torch.Tensor],
- delta_timestamps: dict[str, list[float]],
+ fps: int,
tolerance_s: float,
-) -> dict[torch.Tensor]:
+ raise_value_error: bool = True,
+) -> bool:
"""
- Given a current item in the dataset containing a timestamp (e.g. 0.6 seconds), and a list of time differences of
- some modalities (e.g. delta_timestamps={"observation.image": [-0.8, -0.2, 0, 0.2]}), this function computes for each
- given modality (e.g. "observation.image") a list of query timestamps (e.g. [-0.2, 0.4, 0.6, 0.8]) and loads the closest
- frames in the dataset.
-
- Importantly, when no frame can be found around a query timestamp within a specified tolerance window, this function
- raises an AssertionError. When a timestamp is queried before the first available timestamp of the episode or after
- the last available timestamp, the violation of the tolerance doesnt raise an AssertionError, and the function
- populates a boolean array indicating which frames are outside of the episode range. For instance, this boolean array
- is useful during batched training to not supervise actions associated to timestamps coming after the end of the
- episode, or to pad the observations in a specific way. Note that by default the observation frames before the start
- of the episode are the same as the first frame of the episode.
-
- Parameters:
- - item (dict): A dictionary containing all the data related to a frame. It is the result of `dataset[idx]`. Each key
- corresponds to a different modality (e.g., "timestamp", "observation.image", "action").
- - hf_dataset (datasets.Dataset): A dictionary containing the full dataset. Each key corresponds to a different
- modality (e.g., "timestamp", "observation.image", "action").
- - episode_data_index (dict): A dictionary containing two keys ("from" and "to") associated to dataset indices.
- They indicate the start index and end index of each episode in the dataset.
- - delta_timestamps (dict): A dictionary containing lists of delta timestamps for each possible modality to be
- retrieved. These deltas are added to the item timestamp to form the query timestamps.
- - tolerance_s (float, optional): The tolerance level (in seconds) used to determine if a data point is close enough to the query
- timestamp by asserting `tol > difference`. It is suggested to set `tol` to a smaller value than the
- smallest expected inter-frame period, but large enough to account for jitter.
-
- Returns:
- - The same item with the queried frames for each modality specified in delta_timestamps, with an additional key for
- each modality (e.g. "observation.image_is_pad").
-
- Raises:
- - AssertionError: If any of the frames unexpectedly violate the tolerance level. This could indicate synchronization
- issues with timestamps during data collection.
+ This check is to make sure that each timestamps is separated to the next by 1/fps +/- tolerance to
+ account for possible numerical error.
"""
- # get indices of the frames associated to the episode, and their timestamps
- ep_id = item["episode_index"].item()
- ep_data_id_from = episode_data_index["from"][ep_id].item()
- ep_data_id_to = episode_data_index["to"][ep_id].item()
- ep_data_ids = torch.arange(ep_data_id_from, ep_data_id_to, 1)
-
- # load timestamps
- ep_timestamps = hf_dataset.select_columns("timestamp")[ep_data_id_from:ep_data_id_to]["timestamp"]
- ep_timestamps = torch.stack(ep_timestamps)
-
- # we make the assumption that the timestamps are sorted
- ep_first_ts = ep_timestamps[0]
- ep_last_ts = ep_timestamps[-1]
- current_ts = item["timestamp"].item()
-
- for key in delta_timestamps:
- # get timestamps used as query to retrieve data of previous/future frames
- delta_ts = delta_timestamps[key]
- query_ts = current_ts + torch.tensor(delta_ts)
-
- # compute distances between each query timestamp and all timestamps of all the frames belonging to the episode
- dist = torch.cdist(query_ts[:, None], ep_timestamps[:, None], p=1)
- min_, argmin_ = dist.min(1)
-
- # TODO(rcadene): synchronize timestamps + interpolation if needed
-
- is_pad = min_ > tolerance_s
-
- # check violated query timestamps are all outside the episode range
- assert ((query_ts[is_pad] < ep_first_ts) | (ep_last_ts < query_ts[is_pad])).all(), (
- f"One or several timestamps unexpectedly violate the tolerance ({min_} > {tolerance_s=}) inside episode range."
- "This might be due to synchronization issues with timestamps during data collection."
- )
-
- # get dataset indices corresponding to frames to be loaded
- data_ids = ep_data_ids[argmin_]
-
- # load frames modality
- item[key] = hf_dataset.select_columns(key)[data_ids][key]
-
- if isinstance(item[key][0], dict) and "path" in item[key][0]:
- # video mode where frame are expressed as dict of path and timestamp
- item[key] = item[key]
- else:
- item[key] = torch.stack(item[key])
-
- item[f"{key}_is_pad"] = is_pad
-
- return item
-
-
-def calculate_episode_data_index(hf_dataset: datasets.Dataset) -> Dict[str, torch.Tensor]:
+ timestamps = torch.stack(hf_dataset["timestamp"])
+ diffs = torch.diff(timestamps)
+ within_tolerance = torch.abs(diffs - 1 / fps) <= tolerance_s
+
+ # We mask differences between the timestamp at the end of an episode
+ # and the one at the start of the next episode since these are expected
+ # to be outside tolerance.
+ mask = torch.ones(len(diffs), dtype=torch.bool)
+ ignored_diffs = episode_data_index["to"][:-1] - 1
+ mask[ignored_diffs] = False
+ filtered_within_tolerance = within_tolerance[mask]
+
+ if not torch.all(filtered_within_tolerance):
+ # Track original indices before masking
+ original_indices = torch.arange(len(diffs))
+ filtered_indices = original_indices[mask]
+ outside_tolerance_filtered_indices = torch.nonzero(~filtered_within_tolerance) # .squeeze()
+ outside_tolerance_indices = filtered_indices[outside_tolerance_filtered_indices]
+ episode_indices = torch.stack(hf_dataset["episode_index"])
+
+ outside_tolerances = []
+ for idx in outside_tolerance_indices:
+ entry = {
+ "timestamps": [timestamps[idx], timestamps[idx + 1]],
+ "diff": diffs[idx],
+ "episode_index": episode_indices[idx].item(),
+ }
+ outside_tolerances.append(entry)
+
+ if raise_value_error:
+ raise ValueError(
+ f"""One or several timestamps unexpectedly violate the tolerance inside episode range.
+ This might be due to synchronization issues with timestamps during data collection.
+ \n{pformat(outside_tolerances)}"""
+ )
+ return False
+
+ return True
+
+
+def check_delta_timestamps(
+ delta_timestamps: dict[str, list[float]], fps: int, tolerance_s: float, raise_value_error: bool = True
+) -> bool:
+ """This will check if all the values in delta_timestamps are multiples of 1/fps +/- tolerance.
+ This is to ensure that these delta_timestamps added to any timestamp from a dataset will themselves be
+ actual timestamps from the dataset.
"""
- Calculate episode data index for the provided HuggingFace Dataset. Relies on episode_index column of hf_dataset.
+ outside_tolerance = {}
+ for key, delta_ts in delta_timestamps.items():
+ within_tolerance = [abs(ts * fps - round(ts * fps)) / fps <= tolerance_s for ts in delta_ts]
+ if not all(within_tolerance):
+ outside_tolerance[key] = [
+ ts for ts, is_within in zip(delta_ts, within_tolerance, strict=True) if not is_within
+ ]
- Parameters:
- - hf_dataset (datasets.Dataset): A HuggingFace dataset containing the episode index.
+ if len(outside_tolerance) > 0:
+ if raise_value_error:
+ raise ValueError(
+ f"""
+ The following delta_timestamps are found outside of tolerance range.
+ Please make sure they are multiples of 1/{fps} +/- tolerance and adjust
+ their values accordingly.
+ \n{pformat(outside_tolerance)}
+ """
+ )
+ return False
- Returns:
- - episode_data_index: A dictionary containing the data index for each episode. The dictionary has two keys:
- - "from": A tensor containing the starting index of each episode.
- - "to": A tensor containing the ending index of each episode.
- """
- episode_data_index = {"from": [], "to": []}
+ return True
- current_episode = None
- """
- The episode_index is a list of integers, each representing the episode index of the corresponding example.
- For instance, the following is a valid episode_index:
- [0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 2]
-
- Below, we iterate through the episode_index and populate the episode_data_index dictionary with the starting and
- ending index of each episode. For the episode_index above, the episode_data_index dictionary will look like this:
- {
- "from": [0, 3, 7],
- "to": [3, 7, 12]
- }
- """
- if len(hf_dataset) == 0:
- episode_data_index = {
- "from": torch.tensor([]),
- "to": torch.tensor([]),
- }
- return episode_data_index
- for idx, episode_idx in enumerate(hf_dataset["episode_index"]):
- if episode_idx != current_episode:
- # We encountered a new episode, so we append its starting location to the "from" list
- episode_data_index["from"].append(idx)
- # If this is not the first episode, we append the ending location of the previous episode to the "to" list
- if current_episode is not None:
- episode_data_index["to"].append(idx)
- # Let's keep track of the current episode index
- current_episode = episode_idx
- else:
- # We are still in the same episode, so there is nothing for us to do here
- pass
- # We have reached the end of the dataset, so we append the ending location of the last episode to the "to" list
- episode_data_index["to"].append(idx + 1)
- for k in ["from", "to"]:
- episode_data_index[k] = torch.tensor(episode_data_index[k])
+def get_delta_indices(delta_timestamps: dict[str, list[float]], fps: int) -> dict[str, list[int]]:
+ delta_indices = {}
+ for key, delta_ts in delta_timestamps.items():
+ delta_indices[key] = (torch.tensor(delta_ts) * fps).long().tolist()
- return episode_data_index
-
-
-def reset_episode_index(hf_dataset: datasets.Dataset) -> datasets.Dataset:
- """Reset the `episode_index` of the provided HuggingFace Dataset.
-
- `episode_data_index` (and related functionality such as `load_previous_and_future_frames`) requires the
- `episode_index` to be sorted, continuous (1,1,1 and not 1,2,1) and start at 0.
-
- This brings the `episode_index` to the required format.
- """
- if len(hf_dataset) == 0:
- return hf_dataset
- unique_episode_idxs = torch.stack(hf_dataset["episode_index"]).unique().tolist()
- episode_idx_to_reset_idx_mapping = {
- ep_id: reset_ep_id for reset_ep_id, ep_id in enumerate(unique_episode_idxs)
- }
-
- def modify_ep_idx_func(example):
- example["episode_index"] = episode_idx_to_reset_idx_mapping[example["episode_index"].item()]
- return example
-
- hf_dataset = hf_dataset.map(modify_ep_idx_func)
-
- return hf_dataset
+ return delta_indices
def cycle(iterable):
@@ -400,7 +451,7 @@ def cycle(iterable):
iterator = iter(iterable)
-def create_branch(repo_id, *, branch: str, repo_type: str | None = None):
+def create_branch(repo_id, *, branch: str, repo_type: str | None = None) -> None:
"""Create a branch on a existing Hugging Face repo. Delete the branch if it already
exists before creating it.
"""
@@ -415,12 +466,35 @@ def create_branch(repo_id, *, branch: str, repo_type: str | None = None):
api.create_branch(repo_id, repo_type=repo_type, branch=branch)
-def create_lerobot_dataset_card(tags: list | None = None, text: str | None = None) -> DatasetCard:
- card = DatasetCard(DATASET_CARD_TEMPLATE)
- card.data.task_categories = ["robotics"]
- card.data.tags = ["LeRobot"]
- if tags is not None:
- card.data.tags += tags
- if text is not None:
- card.text += text
- return card
+def create_lerobot_dataset_card(
+ tags: list | None = None,
+ dataset_info: dict | None = None,
+ **kwargs,
+) -> DatasetCard:
+ """
+ Keyword arguments will be used to replace values in ./lerobot/common/datasets/card_template.md.
+ Note: If specified, license must be one of https://huggingface.co/docs/hub/repositories-licenses.
+ """
+ card_tags = ["LeRobot"]
+ if tags:
+ card_tags += tags
+ if dataset_info:
+ dataset_structure = "[meta/info.json](meta/info.json):\n"
+ dataset_structure += f"```json\n{json.dumps(dataset_info, indent=4)}\n```\n"
+ kwargs = {**kwargs, "dataset_structure": dataset_structure}
+ card_data = DatasetCardData(
+ license=kwargs.get("license"),
+ tags=card_tags,
+ task_categories=["robotics"],
+ configs=[
+ {
+ "config_name": "default",
+ "data_files": "data/*/*.parquet",
+ }
+ ],
+ )
+ return DatasetCard.from_template(
+ card_data=card_data,
+ template_path="./lerobot/common/datasets/card_template.md",
+ **kwargs,
+ )
diff --git a/lerobot/common/datasets/v2/batch_convert_dataset_v1_to_v2.py b/lerobot/common/datasets/v2/batch_convert_dataset_v1_to_v2.py
new file mode 100644
index 000000000..c8da2fe14
--- /dev/null
+++ b/lerobot/common/datasets/v2/batch_convert_dataset_v1_to_v2.py
@@ -0,0 +1,882 @@
+#!/usr/bin/env python
+
+# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This script is for internal use to convert all datasets under the 'lerobot' hub user account to v2.
+
+Note: Since the original Aloha datasets don't use shadow motors, you need to comment those out in
+lerobot/configs/robot/aloha.yaml before running this script.
+"""
+
+import traceback
+from pathlib import Path
+from textwrap import dedent
+
+from lerobot import available_datasets
+from lerobot.common.datasets.v2.convert_dataset_v1_to_v2 import convert_dataset, parse_robot_config
+
+LOCAL_DIR = Path("data/")
+
+ALOHA_CONFIG = Path("lerobot/configs/robot/aloha.yaml")
+ALOHA_MOBILE_INFO = {
+ "robot_config": parse_robot_config(ALOHA_CONFIG),
+ "license": "mit",
+ "url": "https://mobile-aloha.github.io/",
+ "paper": "https://arxiv.org/abs/2401.02117",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{fu2024mobile,
+ author = {Fu, Zipeng and Zhao, Tony Z. and Finn, Chelsea},
+ title = {Mobile ALOHA: Learning Bimanual Mobile Manipulation with Low-Cost Whole-Body Teleoperation},
+ booktitle = {arXiv},
+ year = {2024},
+ }""").lstrip(),
+}
+ALOHA_STATIC_INFO = {
+ "robot_config": parse_robot_config(ALOHA_CONFIG),
+ "license": "mit",
+ "url": "https://tonyzhaozh.github.io/aloha/",
+ "paper": "https://arxiv.org/abs/2304.13705",
+ "citation_bibtex": dedent(r"""
+ @article{Zhao2023LearningFB,
+ title={Learning Fine-Grained Bimanual Manipulation with Low-Cost Hardware},
+ author={Tony Zhao and Vikash Kumar and Sergey Levine and Chelsea Finn},
+ journal={RSS},
+ year={2023},
+ volume={abs/2304.13705},
+ url={https://arxiv.org/abs/2304.13705}
+ }""").lstrip(),
+}
+PUSHT_INFO = {
+ "license": "mit",
+ "url": "https://diffusion-policy.cs.columbia.edu/",
+ "paper": "https://arxiv.org/abs/2303.04137v5",
+ "citation_bibtex": dedent(r"""
+ @article{chi2024diffusionpolicy,
+ author = {Cheng Chi and Zhenjia Xu and Siyuan Feng and Eric Cousineau and Yilun Du and Benjamin Burchfiel and Russ Tedrake and Shuran Song},
+ title ={Diffusion Policy: Visuomotor Policy Learning via Action Diffusion},
+ journal = {The International Journal of Robotics Research},
+ year = {2024},
+ }""").lstrip(),
+}
+XARM_INFO = {
+ "license": "mit",
+ "url": "https://www.nicklashansen.com/td-mpc/",
+ "paper": "https://arxiv.org/abs/2203.04955",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{Hansen2022tdmpc,
+ title={Temporal Difference Learning for Model Predictive Control},
+ author={Nicklas Hansen and Xiaolong Wang and Hao Su},
+ booktitle={ICML},
+ year={2022}
+ }
+ """),
+}
+UNITREEH_INFO = {
+ "license": "apache-2.0",
+}
+
+DATASETS = {
+ "aloha_mobile_cabinet": {
+ "single_task": "Open the top cabinet, store the pot inside it then close the cabinet.",
+ **ALOHA_MOBILE_INFO,
+ },
+ "aloha_mobile_chair": {
+ "single_task": "Push the chairs in front of the desk to place them against it.",
+ **ALOHA_MOBILE_INFO,
+ },
+ "aloha_mobile_elevator": {
+ "single_task": "Take the elevator to the 1st floor.",
+ **ALOHA_MOBILE_INFO,
+ },
+ "aloha_mobile_shrimp": {
+ "single_task": "Sauté the raw shrimp on both sides, then serve it in the bowl.",
+ **ALOHA_MOBILE_INFO,
+ },
+ "aloha_mobile_wash_pan": {
+ "single_task": "Pick up the pan, rinse it in the sink and then place it in the drying rack.",
+ **ALOHA_MOBILE_INFO,
+ },
+ "aloha_mobile_wipe_wine": {
+ "single_task": "Pick up the wet cloth on the faucet and use it to clean the spilled wine on the table and underneath the glass.",
+ **ALOHA_MOBILE_INFO,
+ },
+ "aloha_static_battery": {
+ "single_task": "Place the battery into the slot of the remote controller.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_static_candy": {"single_task": "Pick up the candy and unwrap it.", **ALOHA_STATIC_INFO},
+ "aloha_static_coffee": {
+ "single_task": "Place the coffee capsule inside the capsule container, then place the cup onto the center of the cup tray, then push the 'Hot Water' and 'Travel Mug' buttons.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_static_coffee_new": {
+ "single_task": "Place the coffee capsule inside the capsule container, then place the cup onto the center of the cup tray.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_static_cups_open": {
+ "single_task": "Pick up the plastic cup and open its lid.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_static_fork_pick_up": {
+ "single_task": "Pick up the fork and place it on the plate.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_static_pingpong_test": {
+ "single_task": "Transfer one of the two balls in the right glass into the left glass, then transfer it back to the right glass.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_static_pro_pencil": {
+ "single_task": "Pick up the pencil with the right arm, hand it over to the left arm then place it back onto the table.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_static_screw_driver": {
+ "single_task": "Pick up the screwdriver with the right arm, hand it over to the left arm then place it into the cup.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_static_tape": {
+ "single_task": "Cut a small piece of tape from the tape dispenser then place it on the cardboard box's edge.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_static_thread_velcro": {
+ "single_task": "Pick up the velcro cable tie with the left arm, then insert the end of the velcro tie into the other end's loop with the right arm.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_static_towel": {
+ "single_task": "Pick up a piece of paper towel and place it on the spilled liquid.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_static_vinh_cup": {
+ "single_task": "Pick up the platic cup with the right arm, then pop its lid open with the left arm.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_static_vinh_cup_left": {
+ "single_task": "Pick up the platic cup with the left arm, then pop its lid open with the right arm.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_static_ziploc_slide": {"single_task": "Slide open the ziploc bag.", **ALOHA_STATIC_INFO},
+ "aloha_sim_insertion_scripted": {"single_task": "Insert the peg into the socket.", **ALOHA_STATIC_INFO},
+ "aloha_sim_insertion_scripted_image": {
+ "single_task": "Insert the peg into the socket.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_sim_insertion_human": {"single_task": "Insert the peg into the socket.", **ALOHA_STATIC_INFO},
+ "aloha_sim_insertion_human_image": {
+ "single_task": "Insert the peg into the socket.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_sim_transfer_cube_scripted": {
+ "single_task": "Pick up the cube with the right arm and transfer it to the left arm.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_sim_transfer_cube_scripted_image": {
+ "single_task": "Pick up the cube with the right arm and transfer it to the left arm.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_sim_transfer_cube_human": {
+ "single_task": "Pick up the cube with the right arm and transfer it to the left arm.",
+ **ALOHA_STATIC_INFO,
+ },
+ "aloha_sim_transfer_cube_human_image": {
+ "single_task": "Pick up the cube with the right arm and transfer it to the left arm.",
+ **ALOHA_STATIC_INFO,
+ },
+ "pusht": {"single_task": "Push the T-shaped block onto the T-shaped target.", **PUSHT_INFO},
+ "pusht_image": {"single_task": "Push the T-shaped block onto the T-shaped target.", **PUSHT_INFO},
+ "unitreeh1_fold_clothes": {"single_task": "Fold the sweatshirt.", **UNITREEH_INFO},
+ "unitreeh1_rearrange_objects": {"single_task": "Put the object into the bin.", **UNITREEH_INFO},
+ "unitreeh1_two_robot_greeting": {
+ "single_task": "Greet the other robot with a high five.",
+ **UNITREEH_INFO,
+ },
+ "unitreeh1_warehouse": {
+ "single_task": "Grab the spray paint on the shelf and place it in the bin on top of the robot dog.",
+ **UNITREEH_INFO,
+ },
+ "xarm_lift_medium": {"single_task": "Pick up the cube and lift it.", **XARM_INFO},
+ "xarm_lift_medium_image": {"single_task": "Pick up the cube and lift it.", **XARM_INFO},
+ "xarm_lift_medium_replay": {"single_task": "Pick up the cube and lift it.", **XARM_INFO},
+ "xarm_lift_medium_replay_image": {"single_task": "Pick up the cube and lift it.", **XARM_INFO},
+ "xarm_push_medium": {"single_task": "Push the cube onto the target.", **XARM_INFO},
+ "xarm_push_medium_image": {"single_task": "Push the cube onto the target.", **XARM_INFO},
+ "xarm_push_medium_replay": {"single_task": "Push the cube onto the target.", **XARM_INFO},
+ "xarm_push_medium_replay_image": {"single_task": "Push the cube onto the target.", **XARM_INFO},
+ "umi_cup_in_the_wild": {
+ "single_task": "Put the cup on the plate.",
+ "license": "apache-2.0",
+ },
+ "asu_table_top": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "paper": "https://link.springer.com/article/10.1007/s10514-023-10129-1",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{zhou2023modularity,
+ title={Modularity through Attention: Efficient Training and Transfer of Language-Conditioned Policies for Robot Manipulation},
+ author={Zhou, Yifan and Sonawani, Shubham and Phielipp, Mariano and Stepputtis, Simon and Amor, Heni},
+ booktitle={Conference on Robot Learning},
+ pages={1684--1695},
+ year={2023},
+ organization={PMLR}
+ }
+ @article{zhou2023learning,
+ title={Learning modular language-conditioned robot policies through attention},
+ author={Zhou, Yifan and Sonawani, Shubham and Phielipp, Mariano and Ben Amor, Heni and Stepputtis, Simon},
+ journal={Autonomous Robots},
+ pages={1--21},
+ year={2023},
+ publisher={Springer}
+ }""").lstrip(),
+ },
+ "austin_buds_dataset": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://ut-austin-rpl.github.io/BUDS-website/",
+ "paper": "https://arxiv.org/abs/2109.13841",
+ "citation_bibtex": dedent(r"""
+ @article{zhu2022bottom,
+ title={Bottom-Up Skill Discovery From Unsegmented Demonstrations for Long-Horizon Robot Manipulation},
+ author={Zhu, Yifeng and Stone, Peter and Zhu, Yuke},
+ journal={IEEE Robotics and Automation Letters},
+ volume={7},
+ number={2},
+ pages={4126--4133},
+ year={2022},
+ publisher={IEEE}
+ }""").lstrip(),
+ },
+ "austin_sailor_dataset": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://ut-austin-rpl.github.io/sailor/",
+ "paper": "https://arxiv.org/abs/2210.11435",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{nasiriany2022sailor,
+ title={Learning and Retrieval from Prior Data for Skill-based Imitation Learning},
+ author={Soroush Nasiriany and Tian Gao and Ajay Mandlekar and Yuke Zhu},
+ booktitle={Conference on Robot Learning (CoRL)},
+ year={2022}
+ }""").lstrip(),
+ },
+ "austin_sirius_dataset": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://ut-austin-rpl.github.io/sirius/",
+ "paper": "https://arxiv.org/abs/2211.08416",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{liu2022robot,
+ title = {Robot Learning on the Job: Human-in-the-Loop Autonomy and Learning During Deployment},
+ author = {Huihan Liu and Soroush Nasiriany and Lance Zhang and Zhiyao Bao and Yuke Zhu},
+ booktitle = {Robotics: Science and Systems (RSS)},
+ year = {2023}
+ }""").lstrip(),
+ },
+ "berkeley_autolab_ur5": {
+ "tasks_col": "language_instruction",
+ "license": "cc-by-4.0",
+ "url": "https://sites.google.com/view/berkeley-ur5/home",
+ "citation_bibtex": dedent(r"""
+ @misc{BerkeleyUR5Website,
+ title = {Berkeley {UR5} Demonstration Dataset},
+ author = {Lawrence Yunliang Chen and Simeon Adebola and Ken Goldberg},
+ howpublished = {https://sites.google.com/view/berkeley-ur5/home},
+ }""").lstrip(),
+ },
+ "berkeley_cable_routing": {
+ "tasks_col": "language_instruction",
+ "license": "cc-by-4.0",
+ "url": "https://sites.google.com/view/cablerouting/home",
+ "paper": "https://arxiv.org/abs/2307.08927",
+ "citation_bibtex": dedent(r"""
+ @article{luo2023multistage,
+ author = {Jianlan Luo and Charles Xu and Xinyang Geng and Gilbert Feng and Kuan Fang and Liam Tan and Stefan Schaal and Sergey Levine},
+ title = {Multi-Stage Cable Routing through Hierarchical Imitation Learning},
+ journal = {arXiv pre-print},
+ year = {2023},
+ url = {https://arxiv.org/abs/2307.08927},
+ }""").lstrip(),
+ },
+ "berkeley_fanuc_manipulation": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://sites.google.com/berkeley.edu/fanuc-manipulation",
+ "citation_bibtex": dedent(r"""
+ @article{fanuc_manipulation2023,
+ title={Fanuc Manipulation: A Dataset for Learning-based Manipulation with FANUC Mate 200iD Robot},
+ author={Zhu, Xinghao and Tian, Ran and Xu, Chenfeng and Ding, Mingyu and Zhan, Wei and Tomizuka, Masayoshi},
+ year={2023},
+ }""").lstrip(),
+ },
+ "berkeley_gnm_cory_hall": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "paper": "https://arxiv.org/abs/1709.10489",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{kahn2018self,
+ title={Self-supervised deep reinforcement learning with generalized computation graphs for robot navigation},
+ author={Kahn, Gregory and Villaflor, Adam and Ding, Bosen and Abbeel, Pieter and Levine, Sergey},
+ booktitle={2018 IEEE international conference on robotics and automation (ICRA)},
+ pages={5129--5136},
+ year={2018},
+ organization={IEEE}
+ }""").lstrip(),
+ },
+ "berkeley_gnm_recon": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://sites.google.com/view/recon-robot",
+ "paper": "https://arxiv.org/abs/2104.05859",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{shah2021rapid,
+ title={Rapid Exploration for Open-World Navigation with Latent Goal Models},
+ author={Dhruv Shah and Benjamin Eysenbach and Nicholas Rhinehart and Sergey Levine},
+ booktitle={5th Annual Conference on Robot Learning },
+ year={2021},
+ url={https://openreview.net/forum?id=d_SWJhyKfVw}
+ }""").lstrip(),
+ },
+ "berkeley_gnm_sac_son": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://sites.google.com/view/SACSoN-review",
+ "paper": "https://arxiv.org/abs/2306.01874",
+ "citation_bibtex": dedent(r"""
+ @article{hirose2023sacson,
+ title={SACSoN: Scalable Autonomous Data Collection for Social Navigation},
+ author={Hirose, Noriaki and Shah, Dhruv and Sridhar, Ajay and Levine, Sergey},
+ journal={arXiv preprint arXiv:2306.01874},
+ year={2023}
+ }""").lstrip(),
+ },
+ "berkeley_mvp": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "paper": "https://arxiv.org/abs/2203.06173",
+ "citation_bibtex": dedent(r"""
+ @InProceedings{Radosavovic2022,
+ title = {Real-World Robot Learning with Masked Visual Pre-training},
+ author = {Ilija Radosavovic and Tete Xiao and Stephen James and Pieter Abbeel and Jitendra Malik and Trevor Darrell},
+ booktitle = {CoRL},
+ year = {2022}
+ }""").lstrip(),
+ },
+ "berkeley_rpt": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "paper": "https://arxiv.org/abs/2306.10007",
+ "citation_bibtex": dedent(r"""
+ @article{Radosavovic2023,
+ title={Robot Learning with Sensorimotor Pre-training},
+ author={Ilija Radosavovic and Baifeng Shi and Letian Fu and Ken Goldberg and Trevor Darrell and Jitendra Malik},
+ year={2023},
+ journal={arXiv:2306.10007}
+ }""").lstrip(),
+ },
+ "cmu_franka_exploration_dataset": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://human-world-model.github.io/",
+ "paper": "https://arxiv.org/abs/2308.10901",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{mendonca2023structured,
+ title={Structured World Models from Human Videos},
+ author={Mendonca, Russell and Bahl, Shikhar and Pathak, Deepak},
+ journal={RSS},
+ year={2023}
+ }""").lstrip(),
+ },
+ "cmu_play_fusion": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://play-fusion.github.io/",
+ "paper": "https://arxiv.org/abs/2312.04549",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{chen2023playfusion,
+ title={PlayFusion: Skill Acquisition via Diffusion from Language-Annotated Play},
+ author={Chen, Lili and Bahl, Shikhar and Pathak, Deepak},
+ booktitle={CoRL},
+ year={2023}
+ }""").lstrip(),
+ },
+ "cmu_stretch": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://robo-affordances.github.io/",
+ "paper": "https://arxiv.org/abs/2304.08488",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{bahl2023affordances,
+ title={Affordances from Human Videos as a Versatile Representation for Robotics},
+ author={Bahl, Shikhar and Mendonca, Russell and Chen, Lili and Jain, Unnat and Pathak, Deepak},
+ booktitle={CVPR},
+ year={2023}
+ }
+ @article{mendonca2023structured,
+ title={Structured World Models from Human Videos},
+ author={Mendonca, Russell and Bahl, Shikhar and Pathak, Deepak},
+ journal={CoRL},
+ year={2023}
+ }""").lstrip(),
+ },
+ "columbia_cairlab_pusht_real": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://diffusion-policy.cs.columbia.edu/",
+ "paper": "https://arxiv.org/abs/2303.04137v5",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{chi2023diffusionpolicy,
+ title={Diffusion Policy: Visuomotor Policy Learning via Action Diffusion},
+ author={Chi, Cheng and Feng, Siyuan and Du, Yilun and Xu, Zhenjia and Cousineau, Eric and Burchfiel, Benjamin and Song, Shuran},
+ booktitle={Proceedings of Robotics: Science and Systems (RSS)},
+ year={2023}
+ }""").lstrip(),
+ },
+ "conq_hose_manipulation": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://sites.google.com/view/conq-hose-manipulation-dataset/home",
+ "citation_bibtex": dedent(r"""
+ @misc{ConqHoseManipData,
+ author={Peter Mitrano and Dmitry Berenson},
+ title={Conq Hose Manipulation Dataset, v1.15.0},
+ year={2024},
+ howpublished={https://sites.google.com/view/conq-hose-manipulation-dataset}
+ }""").lstrip(),
+ },
+ "dlr_edan_shared_control": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "paper": "https://ieeexplore.ieee.org/document/9341156",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{vogel_edan_2020,
+ title = {EDAN - an EMG-Controlled Daily Assistant to Help People with Physical Disabilities},
+ language = {en},
+ booktitle = {2020 {IEEE}/{RSJ} {International} {Conference} on {Intelligent} {Robots} and {Systems} ({IROS})},
+ author = {Vogel, Jörn and Hagengruber, Annette and Iskandar, Maged and Quere, Gabriel and Leipscher, Ulrike and Bustamante, Samuel and Dietrich, Alexander and Hoeppner, Hannes and Leidner, Daniel and Albu-Schäffer, Alin},
+ year = {2020}
+ }
+ @inproceedings{quere_shared_2020,
+ address = {Paris, France},
+ title = {Shared {Control} {Templates} for {Assistive} {Robotics}},
+ language = {en},
+ booktitle = {2020 {IEEE} {International} {Conference} on {Robotics} and {Automation} ({ICRA})},
+ author = {Quere, Gabriel and Hagengruber, Annette and Iskandar, Maged and Bustamante, Samuel and Leidner, Daniel and Stulp, Freek and Vogel, Joern},
+ year = {2020},
+ pages = {7},
+ }""").lstrip(),
+ },
+ "dlr_sara_grid_clamp": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "paper": "https://www.researchsquare.com/article/rs-3289569/v1",
+ "citation_bibtex": dedent(r"""
+ @article{padalkar2023guided,
+ title={A guided reinforcement learning approach using shared control templates for learning manipulation skills in the real world},
+ author={Padalkar, Abhishek and Quere, Gabriel and Raffin, Antonin and Silv{\'e}rio, Jo{\~a}o and Stulp, Freek},
+ journal={Research square preprint rs-3289569/v1},
+ year={2023}
+ }""").lstrip(),
+ },
+ "dlr_sara_pour": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "paper": "https://elib.dlr.de/193739/1/padalkar2023rlsct.pdf",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{padalkar2023guiding,
+ title={Guiding Reinforcement Learning with Shared Control Templates},
+ author={Padalkar, Abhishek and Quere, Gabriel and Steinmetz, Franz and Raffin, Antonin and Nieuwenhuisen, Matthias and Silv{\'e}rio, Jo{\~a}o and Stulp, Freek},
+ booktitle={40th IEEE International Conference on Robotics and Automation, ICRA 2023},
+ year={2023},
+ organization={IEEE}
+ }""").lstrip(),
+ },
+ "droid_100": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://droid-dataset.github.io/",
+ "paper": "https://arxiv.org/abs/2403.12945",
+ "citation_bibtex": dedent(r"""
+ @article{khazatsky2024droid,
+ title = {DROID: A Large-Scale In-The-Wild Robot Manipulation Dataset},
+ author = {Alexander Khazatsky and Karl Pertsch and Suraj Nair and Ashwin Balakrishna and Sudeep Dasari and Siddharth Karamcheti and Soroush Nasiriany and Mohan Kumar Srirama and Lawrence Yunliang Chen and Kirsty Ellis and Peter David Fagan and Joey Hejna and Masha Itkina and Marion Lepert and Yecheng Jason Ma and Patrick Tree Miller and Jimmy Wu and Suneel Belkhale and Shivin Dass and Huy Ha and Arhan Jain and Abraham Lee and Youngwoon Lee and Marius Memmel and Sungjae Park and Ilija Radosavovic and Kaiyuan Wang and Albert Zhan and Kevin Black and Cheng Chi and Kyle Beltran Hatch and Shan Lin and Jingpei Lu and Jean Mercat and Abdul Rehman and Pannag R Sanketi and Archit Sharma and Cody Simpson and Quan Vuong and Homer Rich Walke and Blake Wulfe and Ted Xiao and Jonathan Heewon Yang and Arefeh Yavary and Tony Z. Zhao and Christopher Agia and Rohan Baijal and Mateo Guaman Castro and Daphne Chen and Qiuyu Chen and Trinity Chung and Jaimyn Drake and Ethan Paul Foster and Jensen Gao and David Antonio Herrera and Minho Heo and Kyle Hsu and Jiaheng Hu and Donovon Jackson and Charlotte Le and Yunshuang Li and Kevin Lin and Roy Lin and Zehan Ma and Abhiram Maddukuri and Suvir Mirchandani and Daniel Morton and Tony Nguyen and Abigail O'Neill and Rosario Scalise and Derick Seale and Victor Son and Stephen Tian and Emi Tran and Andrew E. Wang and Yilin Wu and Annie Xie and Jingyun Yang and Patrick Yin and Yunchu Zhang and Osbert Bastani and Glen Berseth and Jeannette Bohg and Ken Goldberg and Abhinav Gupta and Abhishek Gupta and Dinesh Jayaraman and Joseph J Lim and Jitendra Malik and Roberto Martín-Martín and Subramanian Ramamoorthy and Dorsa Sadigh and Shuran Song and Jiajun Wu and Michael C. Yip and Yuke Zhu and Thomas Kollar and Sergey Levine and Chelsea Finn},
+ year = {2024},
+ }""").lstrip(),
+ },
+ "fmb": {
+ "tasks_col": "language_instruction",
+ "license": "cc-by-4.0",
+ "url": "https://functional-manipulation-benchmark.github.io/",
+ "paper": "https://arxiv.org/abs/2401.08553",
+ "citation_bibtex": dedent(r"""
+ @article{luo2024fmb,
+ title={FMB: a Functional Manipulation Benchmark for Generalizable Robotic Learning},
+ author={Luo, Jianlan and Xu, Charles and Liu, Fangchen and Tan, Liam and Lin, Zipeng and Wu, Jeffrey and Abbeel, Pieter and Levine, Sergey},
+ journal={arXiv preprint arXiv:2401.08553},
+ year={2024}
+ }""").lstrip(),
+ },
+ "iamlab_cmu_pickup_insert": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://openreview.net/forum?id=WuBv9-IGDUA",
+ "paper": "https://arxiv.org/abs/2401.14502",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{saxena2023multiresolution,
+ title={Multi-Resolution Sensing for Real-Time Control with Vision-Language Models},
+ author={Saumya Saxena and Mohit Sharma and Oliver Kroemer},
+ booktitle={7th Annual Conference on Robot Learning},
+ year={2023},
+ url={https://openreview.net/forum?id=WuBv9-IGDUA}
+ }""").lstrip(),
+ },
+ "imperialcollege_sawyer_wrist_cam": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ },
+ "jaco_play": {
+ "tasks_col": "language_instruction",
+ "license": "cc-by-4.0",
+ "url": "https://github.com/clvrai/clvr_jaco_play_dataset",
+ "citation_bibtex": dedent(r"""
+ @software{dass2023jacoplay,
+ author = {Dass, Shivin and Yapeter, Jullian and Zhang, Jesse and Zhang, Jiahui
+ and Pertsch, Karl and Nikolaidis, Stefanos and Lim, Joseph J.},
+ title = {CLVR Jaco Play Dataset},
+ url = {https://github.com/clvrai/clvr_jaco_play_dataset},
+ version = {1.0.0},
+ year = {2023}
+ }""").lstrip(),
+ },
+ "kaist_nonprehensile": {
+ "tasks_col": "language_instruction",
+ "license": "cc-by-4.0",
+ "url": "https://github.com/JaeHyung-Kim/rlds_dataset_builder",
+ "citation_bibtex": dedent(r"""
+ @article{kimpre,
+ title={Pre-and post-contact policy decomposition for non-prehensile manipulation with zero-shot sim-to-real transfer},
+ author={Kim, Minchan and Han, Junhyek and Kim, Jaehyung and Kim, Beomjoon},
+ booktitle={2023 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
+ year={2023},
+ organization={IEEE}
+ }""").lstrip(),
+ },
+ "nyu_door_opening_surprising_effectiveness": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://jyopari.github.io/VINN/",
+ "paper": "https://arxiv.org/abs/2112.01511",
+ "citation_bibtex": dedent(r"""
+ @misc{pari2021surprising,
+ title={The Surprising Effectiveness of Representation Learning for Visual Imitation},
+ author={Jyothish Pari and Nur Muhammad Shafiullah and Sridhar Pandian Arunachalam and Lerrel Pinto},
+ year={2021},
+ eprint={2112.01511},
+ archivePrefix={arXiv},
+ primaryClass={cs.RO}
+ }""").lstrip(),
+ },
+ "nyu_franka_play_dataset": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://play-to-policy.github.io/",
+ "paper": "https://arxiv.org/abs/2210.10047",
+ "citation_bibtex": dedent(r"""
+ @article{cui2022play,
+ title = {From Play to Policy: Conditional Behavior Generation from Uncurated Robot Data},
+ author = {Cui, Zichen Jeff and Wang, Yibin and Shafiullah, Nur Muhammad Mahi and Pinto, Lerrel},
+ journal = {arXiv preprint arXiv:2210.10047},
+ year = {2022}
+ }""").lstrip(),
+ },
+ "nyu_rot_dataset": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://rot-robot.github.io/",
+ "paper": "https://arxiv.org/abs/2206.15469",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{haldar2023watch,
+ title={Watch and match: Supercharging imitation with regularized optimal transport},
+ author={Haldar, Siddhant and Mathur, Vaibhav and Yarats, Denis and Pinto, Lerrel},
+ booktitle={Conference on Robot Learning},
+ pages={32--43},
+ year={2023},
+ organization={PMLR}
+ }""").lstrip(),
+ },
+ "roboturk": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://roboturk.stanford.edu/dataset_real.html",
+ "paper": "PAPER",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{mandlekar2019scaling,
+ title={Scaling robot supervision to hundreds of hours with roboturk: Robotic manipulation dataset through human reasoning and dexterity},
+ author={Mandlekar, Ajay and Booher, Jonathan and Spero, Max and Tung, Albert and Gupta, Anchit and Zhu, Yuke and Garg, Animesh and Savarese, Silvio and Fei-Fei, Li},
+ booktitle={2019 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
+ pages={1048--1055},
+ year={2019},
+ organization={IEEE}
+ }""").lstrip(),
+ },
+ "stanford_hydra_dataset": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://sites.google.com/view/hydra-il-2023",
+ "paper": "https://arxiv.org/abs/2306.17237",
+ "citation_bibtex": dedent(r"""
+ @article{belkhale2023hydra,
+ title={HYDRA: Hybrid Robot Actions for Imitation Learning},
+ author={Belkhale, Suneel and Cui, Yuchen and Sadigh, Dorsa},
+ journal={arxiv},
+ year={2023}
+ }""").lstrip(),
+ },
+ "stanford_kuka_multimodal_dataset": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://sites.google.com/view/visionandtouch",
+ "paper": "https://arxiv.org/abs/1810.10191",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{lee2019icra,
+ title={Making sense of vision and touch: Self-supervised learning of multimodal representations for contact-rich tasks},
+ author={Lee, Michelle A and Zhu, Yuke and Srinivasan, Krishnan and Shah, Parth and Savarese, Silvio and Fei-Fei, Li and Garg, Animesh and Bohg, Jeannette},
+ booktitle={2019 IEEE International Conference on Robotics and Automation (ICRA)},
+ year={2019},
+ url={https://arxiv.org/abs/1810.10191}
+ }""").lstrip(),
+ },
+ "stanford_robocook": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://hshi74.github.io/robocook/",
+ "paper": "https://arxiv.org/abs/2306.14447",
+ "citation_bibtex": dedent(r"""
+ @article{shi2023robocook,
+ title={RoboCook: Long-Horizon Elasto-Plastic Object Manipulation with Diverse Tools},
+ author={Shi, Haochen and Xu, Huazhe and Clarke, Samuel and Li, Yunzhu and Wu, Jiajun},
+ journal={arXiv preprint arXiv:2306.14447},
+ year={2023}
+ }""").lstrip(),
+ },
+ "taco_play": {
+ "tasks_col": "language_instruction",
+ "license": "cc-by-4.0",
+ "url": "https://www.kaggle.com/datasets/oiermees/taco-robot",
+ "paper": "https://arxiv.org/abs/2209.08959, https://arxiv.org/abs/2210.01911",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{rosete2022tacorl,
+ author = {Erick Rosete-Beas and Oier Mees and Gabriel Kalweit and Joschka Boedecker and Wolfram Burgard},
+ title = {Latent Plans for Task Agnostic Offline Reinforcement Learning},
+ journal = {Proceedings of the 6th Conference on Robot Learning (CoRL)},
+ year = {2022}
+ }
+ @inproceedings{mees23hulc2,
+ title={Grounding Language with Visual Affordances over Unstructured Data},
+ author={Oier Mees and Jessica Borja-Diaz and Wolfram Burgard},
+ booktitle = {Proceedings of the IEEE International Conference on Robotics and Automation (ICRA)},
+ year={2023},
+ address = {London, UK}
+ }""").lstrip(),
+ },
+ "tokyo_u_lsmo": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "URL",
+ "paper": "https://arxiv.org/abs/2107.05842",
+ "citation_bibtex": dedent(r"""
+ @Article{Osa22,
+ author = {Takayuki Osa},
+ journal = {The International Journal of Robotics Research},
+ title = {Motion Planning by Learning the Solution Manifold in Trajectory Optimization},
+ year = {2022},
+ number = {3},
+ pages = {291--311},
+ volume = {41},
+ }""").lstrip(),
+ },
+ "toto": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://toto-benchmark.org/",
+ "paper": "https://arxiv.org/abs/2306.00942",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{zhou2023train,
+ author={Zhou, Gaoyue and Dean, Victoria and Srirama, Mohan Kumar and Rajeswaran, Aravind and Pari, Jyothish and Hatch, Kyle and Jain, Aryan and Yu, Tianhe and Abbeel, Pieter and Pinto, Lerrel and Finn, Chelsea and Gupta, Abhinav},
+ booktitle={2023 IEEE International Conference on Robotics and Automation (ICRA)},
+ title={Train Offline, Test Online: A Real Robot Learning Benchmark},
+ year={2023},
+ }""").lstrip(),
+ },
+ "ucsd_kitchen_dataset": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "citation_bibtex": dedent(r"""
+ @ARTICLE{ucsd_kitchens,
+ author = {Ge Yan, Kris Wu, and Xiaolong Wang},
+ title = {{ucsd kitchens Dataset}},
+ year = {2023},
+ month = {August}
+ }""").lstrip(),
+ },
+ "ucsd_pick_and_place_dataset": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://owmcorl.github.io/#",
+ "paper": "https://arxiv.org/abs/2310.16029",
+ "citation_bibtex": dedent(r"""
+ @preprint{Feng2023Finetuning,
+ title={Finetuning Offline World Models in the Real World},
+ author={Yunhai Feng, Nicklas Hansen, Ziyan Xiong, Chandramouli Rajagopalan, Xiaolong Wang},
+ year={2023}
+ }""").lstrip(),
+ },
+ "uiuc_d3field": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://robopil.github.io/d3fields/",
+ "paper": "https://arxiv.org/abs/2309.16118",
+ "citation_bibtex": dedent(r"""
+ @article{wang2023d3field,
+ title={D^3Field: Dynamic 3D Descriptor Fields for Generalizable Robotic Manipulation},
+ author={Wang, Yixuan and Li, Zhuoran and Zhang, Mingtong and Driggs-Campbell, Katherine and Wu, Jiajun and Fei-Fei, Li and Li, Yunzhu},
+ journal={arXiv preprint arXiv:},
+ year={2023},
+ }""").lstrip(),
+ },
+ "usc_cloth_sim": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://uscresl.github.io/dmfd/",
+ "paper": "https://arxiv.org/abs/2207.10148",
+ "citation_bibtex": dedent(r"""
+ @article{salhotra2022dmfd,
+ author={Salhotra, Gautam and Liu, I-Chun Arthur and Dominguez-Kuhne, Marcus and Sukhatme, Gaurav S.},
+ journal={IEEE Robotics and Automation Letters},
+ title={Learning Deformable Object Manipulation From Expert Demonstrations},
+ year={2022},
+ volume={7},
+ number={4},
+ pages={8775-8782},
+ doi={10.1109/LRA.2022.3187843}
+ }""").lstrip(),
+ },
+ "utaustin_mutex": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://ut-austin-rpl.github.io/MUTEX/",
+ "paper": "https://arxiv.org/abs/2309.14320",
+ "citation_bibtex": dedent(r"""
+ @inproceedings{shah2023mutex,
+ title={{MUTEX}: Learning Unified Policies from Multimodal Task Specifications},
+ author={Rutav Shah and Roberto Mart{\'\i}n-Mart{\'\i}n and Yuke Zhu},
+ booktitle={7th Annual Conference on Robot Learning},
+ year={2023},
+ url={https://openreview.net/forum?id=PwqiqaaEzJ}
+ }""").lstrip(),
+ },
+ "utokyo_pr2_opening_fridge": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "citation_bibtex": dedent(r"""
+ @misc{oh2023pr2utokyodatasets,
+ author={Jihoon Oh and Naoaki Kanazawa and Kento Kawaharazuka},
+ title={X-Embodiment U-Tokyo PR2 Datasets},
+ year={2023},
+ url={https://github.com/ojh6404/rlds_dataset_builder},
+ }""").lstrip(),
+ },
+ "utokyo_pr2_tabletop_manipulation": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "citation_bibtex": dedent(r"""
+ @misc{oh2023pr2utokyodatasets,
+ author={Jihoon Oh and Naoaki Kanazawa and Kento Kawaharazuka},
+ title={X-Embodiment U-Tokyo PR2 Datasets},
+ year={2023},
+ url={https://github.com/ojh6404/rlds_dataset_builder},
+ }""").lstrip(),
+ },
+ "utokyo_saytap": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://saytap.github.io/",
+ "paper": "https://arxiv.org/abs/2306.07580",
+ "citation_bibtex": dedent(r"""
+ @article{saytap2023,
+ author = {Yujin Tang and Wenhao Yu and Jie Tan and Heiga Zen and Aleksandra Faust and
+ Tatsuya Harada},
+ title = {SayTap: Language to Quadrupedal Locomotion},
+ eprint = {arXiv:2306.07580},
+ url = {https://saytap.github.io},
+ note = {https://saytap.github.io},
+ year = {2023}
+ }""").lstrip(),
+ },
+ "utokyo_xarm_bimanual": {
+ "tasks_col": "language_instruction",
+ "license": "cc-by-4.0",
+ "citation_bibtex": dedent(r"""
+ @misc{matsushima2023weblab,
+ title={Weblab xArm Dataset},
+ author={Tatsuya Matsushima and Hiroki Furuta and Yusuke Iwasawa and Yutaka Matsuo},
+ year={2023},
+ }""").lstrip(),
+ },
+ "utokyo_xarm_pick_and_place": {
+ "tasks_col": "language_instruction",
+ "license": "cc-by-4.0",
+ "citation_bibtex": dedent(r"""
+ @misc{matsushima2023weblab,
+ title={Weblab xArm Dataset},
+ author={Tatsuya Matsushima and Hiroki Furuta and Yusuke Iwasawa and Yutaka Matsuo},
+ year={2023},
+ }""").lstrip(),
+ },
+ "viola": {
+ "tasks_col": "language_instruction",
+ "license": "mit",
+ "url": "https://ut-austin-rpl.github.io/VIOLA/",
+ "paper": "https://arxiv.org/abs/2210.11339",
+ "citation_bibtex": dedent(r"""
+ @article{zhu2022viola,
+ title={VIOLA: Imitation Learning for Vision-Based Manipulation with Object Proposal Priors},
+ author={Zhu, Yifeng and Joshi, Abhishek and Stone, Peter and Zhu, Yuke},
+ journal={6th Annual Conference on Robot Learning (CoRL)},
+ year={2022}
+ }""").lstrip(),
+ },
+}
+
+
+def batch_convert():
+ status = {}
+ logfile = LOCAL_DIR / "conversion_log.txt"
+ assert set(DATASETS) == {id_.split("/")[1] for id_ in available_datasets}
+ for num, (name, kwargs) in enumerate(DATASETS.items()):
+ repo_id = f"lerobot/{name}"
+ print(f"\nConverting {repo_id} ({num}/{len(DATASETS)})")
+ print("---------------------------------------------------------")
+ try:
+ convert_dataset(repo_id, LOCAL_DIR, **kwargs)
+ status = f"{repo_id}: success."
+ with open(logfile, "a") as file:
+ file.write(status + "\n")
+ except Exception:
+ status = f"{repo_id}: failed\n {traceback.format_exc()}"
+ with open(logfile, "a") as file:
+ file.write(status + "\n")
+ continue
+
+
+if __name__ == "__main__":
+ batch_convert()
diff --git a/lerobot/common/datasets/v2/convert_dataset_v1_to_v2.py b/lerobot/common/datasets/v2/convert_dataset_v1_to_v2.py
new file mode 100644
index 000000000..bf135043b
--- /dev/null
+++ b/lerobot/common/datasets/v2/convert_dataset_v1_to_v2.py
@@ -0,0 +1,665 @@
+#!/usr/bin/env python
+
+# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This script will help you convert any LeRobot dataset already pushed to the hub from codebase version 1.6 to
+2.0. You will be required to provide the 'tasks', which is a short but accurate description in plain English
+for each of the task performed in the dataset. This will allow to easily train models with task-conditionning.
+
+We support 3 different scenarios for these tasks (see instructions below):
+ 1. Single task dataset: all episodes of your dataset have the same single task.
+ 2. Single task episodes: the episodes of your dataset each contain a single task but they can differ from
+ one episode to the next.
+ 3. Multi task episodes: episodes of your dataset may each contain several different tasks.
+
+
+Can you can also provide a robot config .yaml file (not mandatory) to this script via the option
+'--robot-config' so that it writes information about the robot (robot type, motors names) this dataset was
+recorded with. For now, only Aloha/Koch type robots are supported with this option.
+
+
+# 1. Single task dataset
+If your dataset contains a single task, you can simply provide it directly via the CLI with the
+'--single-task' option.
+
+Examples:
+
+```bash
+python lerobot/common/datasets/v2/convert_dataset_v1_to_v2.py \
+ --repo-id lerobot/aloha_sim_insertion_human_image \
+ --single-task "Insert the peg into the socket." \
+ --robot-config lerobot/configs/robot/aloha.yaml \
+ --local-dir data
+```
+
+```bash
+python lerobot/common/datasets/v2/convert_dataset_v1_to_v2.py \
+ --repo-id aliberts/koch_tutorial \
+ --single-task "Pick the Lego block and drop it in the box on the right." \
+ --robot-config lerobot/configs/robot/koch.yaml \
+ --local-dir data
+```
+
+
+# 2. Single task episodes
+If your dataset is a multi-task dataset, you have two options to provide the tasks to this script:
+
+- If your dataset already contains a language instruction column in its parquet file, you can simply provide
+ this column's name with the '--tasks-col' arg.
+
+ Example:
+
+ ```bash
+ python lerobot/common/datasets/v2/convert_dataset_v1_to_v2.py \
+ --repo-id lerobot/stanford_kuka_multimodal_dataset \
+ --tasks-col "language_instruction" \
+ --local-dir data
+ ```
+
+- If your dataset doesn't contain a language instruction, you should provide the path to a .json file with the
+ '--tasks-path' arg. This file should have the following structure where keys correspond to each
+ episode_index in the dataset, and values are the language instruction for that episode.
+
+ Example:
+
+ ```json
+ {
+ "0": "Do something",
+ "1": "Do something else",
+ "2": "Do something",
+ "3": "Go there",
+ ...
+ }
+ ```
+
+# 3. Multi task episodes
+If you have multiple tasks per episodes, your dataset should contain a language instruction column in its
+parquet file, and you must provide this column's name with the '--tasks-col' arg.
+
+Example:
+
+```bash
+python lerobot/common/datasets/v2/convert_dataset_v1_to_v2.py \
+ --repo-id lerobot/stanford_kuka_multimodal_dataset \
+ --tasks-col "language_instruction" \
+ --local-dir data
+```
+"""
+
+import argparse
+import contextlib
+import filecmp
+import json
+import logging
+import math
+import shutil
+import subprocess
+import tempfile
+from pathlib import Path
+
+import datasets
+import pyarrow.compute as pc
+import pyarrow.parquet as pq
+import torch
+from datasets import Dataset
+from huggingface_hub import HfApi
+from huggingface_hub.errors import EntryNotFoundError, HfHubHTTPError
+from safetensors.torch import load_file
+
+from lerobot.common.datasets.utils import (
+ DEFAULT_CHUNK_SIZE,
+ DEFAULT_PARQUET_PATH,
+ DEFAULT_VIDEO_PATH,
+ EPISODES_PATH,
+ INFO_PATH,
+ STATS_PATH,
+ TASKS_PATH,
+ create_branch,
+ create_lerobot_dataset_card,
+ flatten_dict,
+ get_hub_safe_version,
+ load_json,
+ unflatten_dict,
+ write_json,
+ write_jsonlines,
+)
+from lerobot.common.datasets.video_utils import (
+ VideoFrame, # noqa: F401
+ get_image_pixel_channels,
+ get_video_info,
+)
+from lerobot.common.utils.utils import init_hydra_config
+
+V16 = "v1.6"
+V20 = "v2.0"
+
+GITATTRIBUTES_REF = "aliberts/gitattributes_reference"
+V1_VIDEO_FILE = "{video_key}_episode_{episode_index:06d}.mp4"
+V1_INFO_PATH = "meta_data/info.json"
+V1_STATS_PATH = "meta_data/stats.safetensors"
+
+
+def parse_robot_config(config_path: Path, config_overrides: list[str] | None = None) -> tuple[str, dict]:
+ robot_cfg = init_hydra_config(config_path, config_overrides)
+ if robot_cfg["robot_type"] in ["aloha", "koch"]:
+ state_names = [
+ f"{arm}_{motor}" if len(robot_cfg["follower_arms"]) > 1 else motor
+ for arm in robot_cfg["follower_arms"]
+ for motor in robot_cfg["follower_arms"][arm]["motors"]
+ ]
+ action_names = [
+ # f"{arm}_{motor}" for arm in ["left", "right"] for motor in robot_cfg["leader_arms"][arm]["motors"]
+ f"{arm}_{motor}" if len(robot_cfg["leader_arms"]) > 1 else motor
+ for arm in robot_cfg["leader_arms"]
+ for motor in robot_cfg["leader_arms"][arm]["motors"]
+ ]
+ # elif robot_cfg["robot_type"] == "stretch3": TODO
+ else:
+ raise NotImplementedError(
+ "Please provide robot_config={'robot_type': ..., 'names': ...} directly to convert_dataset()."
+ )
+
+ return {
+ "robot_type": robot_cfg["robot_type"],
+ "names": {
+ "observation.state": state_names,
+ "observation.effort": state_names,
+ "action": action_names,
+ },
+ }
+
+
+def convert_stats_to_json(v1_dir: Path, v2_dir: Path) -> None:
+ safetensor_path = v1_dir / V1_STATS_PATH
+ stats = load_file(safetensor_path)
+ serialized_stats = {key: value.tolist() for key, value in stats.items()}
+ serialized_stats = unflatten_dict(serialized_stats)
+
+ json_path = v2_dir / STATS_PATH
+ json_path.parent.mkdir(exist_ok=True, parents=True)
+ with open(json_path, "w") as f:
+ json.dump(serialized_stats, f, indent=4)
+
+ # Sanity check
+ with open(json_path) as f:
+ stats_json = json.load(f)
+
+ stats_json = flatten_dict(stats_json)
+ stats_json = {key: torch.tensor(value) for key, value in stats_json.items()}
+ for key in stats:
+ torch.testing.assert_close(stats_json[key], stats[key])
+
+
+def get_features_from_hf_dataset(dataset: Dataset, robot_config: dict | None = None) -> dict[str, list]:
+ features = {}
+ for key, ft in dataset.features.items():
+ if isinstance(ft, datasets.Value):
+ dtype = ft.dtype
+ shape = (1,)
+ names = None
+ if isinstance(ft, datasets.Sequence):
+ assert isinstance(ft.feature, datasets.Value)
+ dtype = ft.feature.dtype
+ shape = (ft.length,)
+ motor_names = (
+ robot_config["names"][key] if robot_config else [f"motor_{i}" for i in range(ft.length)]
+ )
+ assert len(motor_names) == shape[0]
+ names = {"motors": motor_names}
+ elif isinstance(ft, datasets.Image):
+ dtype = "image"
+ image = dataset[0][key] # Assuming first row
+ channels = get_image_pixel_channels(image)
+ shape = (image.height, image.width, channels)
+ names = ["height", "width", "channel"]
+ elif ft._type == "VideoFrame":
+ dtype = "video"
+ shape = None # Add shape later
+ names = ["height", "width", "channel"]
+
+ features[key] = {
+ "dtype": dtype,
+ "shape": shape,
+ "names": names,
+ }
+
+ return features
+
+
+def add_task_index_by_episodes(dataset: Dataset, tasks_by_episodes: dict) -> tuple[Dataset, list[str]]:
+ df = dataset.to_pandas()
+ tasks = list(set(tasks_by_episodes.values()))
+ tasks_to_task_index = {task: task_idx for task_idx, task in enumerate(tasks)}
+ episodes_to_task_index = {ep_idx: tasks_to_task_index[task] for ep_idx, task in tasks_by_episodes.items()}
+ df["task_index"] = df["episode_index"].map(episodes_to_task_index).astype(int)
+
+ features = dataset.features
+ features["task_index"] = datasets.Value(dtype="int64")
+ dataset = Dataset.from_pandas(df, features=features, split="train")
+ return dataset, tasks
+
+
+def add_task_index_from_tasks_col(
+ dataset: Dataset, tasks_col: str
+) -> tuple[Dataset, dict[str, list[str]], list[str]]:
+ df = dataset.to_pandas()
+
+ # HACK: This is to clean some of the instructions in our version of Open X datasets
+ prefix_to_clean = "tf.Tensor(b'"
+ suffix_to_clean = "', shape=(), dtype=string)"
+ df[tasks_col] = df[tasks_col].str.removeprefix(prefix_to_clean).str.removesuffix(suffix_to_clean)
+
+ # Create task_index col
+ tasks_by_episode = df.groupby("episode_index")[tasks_col].unique().apply(lambda x: x.tolist()).to_dict()
+ tasks = df[tasks_col].unique().tolist()
+ tasks_to_task_index = {task: idx for idx, task in enumerate(tasks)}
+ df["task_index"] = df[tasks_col].map(tasks_to_task_index).astype(int)
+
+ # Build the dataset back from df
+ features = dataset.features
+ features["task_index"] = datasets.Value(dtype="int64")
+ dataset = Dataset.from_pandas(df, features=features, split="train")
+ dataset = dataset.remove_columns(tasks_col)
+
+ return dataset, tasks, tasks_by_episode
+
+
+def split_parquet_by_episodes(
+ dataset: Dataset,
+ total_episodes: int,
+ total_chunks: int,
+ output_dir: Path,
+) -> list:
+ table = dataset.data.table
+ episode_lengths = []
+ for ep_chunk in range(total_chunks):
+ ep_chunk_start = DEFAULT_CHUNK_SIZE * ep_chunk
+ ep_chunk_end = min(DEFAULT_CHUNK_SIZE * (ep_chunk + 1), total_episodes)
+ chunk_dir = "/".join(DEFAULT_PARQUET_PATH.split("/")[:-1]).format(episode_chunk=ep_chunk)
+ (output_dir / chunk_dir).mkdir(parents=True, exist_ok=True)
+ for ep_idx in range(ep_chunk_start, ep_chunk_end):
+ ep_table = table.filter(pc.equal(table["episode_index"], ep_idx))
+ episode_lengths.insert(ep_idx, len(ep_table))
+ output_file = output_dir / DEFAULT_PARQUET_PATH.format(
+ episode_chunk=ep_chunk, episode_index=ep_idx
+ )
+ pq.write_table(ep_table, output_file)
+
+ return episode_lengths
+
+
+def move_videos(
+ repo_id: str,
+ video_keys: list[str],
+ total_episodes: int,
+ total_chunks: int,
+ work_dir: Path,
+ clean_gittatributes: Path,
+ branch: str = "main",
+) -> None:
+ """
+ HACK: Since HfApi() doesn't provide a way to move files directly in a repo, this function will run git
+ commands to fetch git lfs video files references to move them into subdirectories without having to
+ actually download them.
+ """
+ _lfs_clone(repo_id, work_dir, branch)
+
+ videos_moved = False
+ video_files = [str(f.relative_to(work_dir)) for f in work_dir.glob("videos*/*.mp4")]
+ if len(video_files) == 0:
+ video_files = [str(f.relative_to(work_dir)) for f in work_dir.glob("videos*/*/*/*.mp4")]
+ videos_moved = True # Videos have already been moved
+
+ assert len(video_files) == total_episodes * len(video_keys)
+
+ lfs_untracked_videos = _get_lfs_untracked_videos(work_dir, video_files)
+
+ current_gittatributes = work_dir / ".gitattributes"
+ if not filecmp.cmp(current_gittatributes, clean_gittatributes, shallow=False):
+ fix_gitattributes(work_dir, current_gittatributes, clean_gittatributes)
+
+ if lfs_untracked_videos:
+ fix_lfs_video_files_tracking(work_dir, video_files)
+
+ if videos_moved:
+ return
+
+ video_dirs = sorted(work_dir.glob("videos*/"))
+ for ep_chunk in range(total_chunks):
+ ep_chunk_start = DEFAULT_CHUNK_SIZE * ep_chunk
+ ep_chunk_end = min(DEFAULT_CHUNK_SIZE * (ep_chunk + 1), total_episodes)
+ for vid_key in video_keys:
+ chunk_dir = "/".join(DEFAULT_VIDEO_PATH.split("/")[:-1]).format(
+ episode_chunk=ep_chunk, video_key=vid_key
+ )
+ (work_dir / chunk_dir).mkdir(parents=True, exist_ok=True)
+
+ for ep_idx in range(ep_chunk_start, ep_chunk_end):
+ target_path = DEFAULT_VIDEO_PATH.format(
+ episode_chunk=ep_chunk, video_key=vid_key, episode_index=ep_idx
+ )
+ video_file = V1_VIDEO_FILE.format(video_key=vid_key, episode_index=ep_idx)
+ if len(video_dirs) == 1:
+ video_path = video_dirs[0] / video_file
+ else:
+ for dir in video_dirs:
+ if (dir / video_file).is_file():
+ video_path = dir / video_file
+ break
+
+ video_path.rename(work_dir / target_path)
+
+ commit_message = "Move video files into chunk subdirectories"
+ subprocess.run(["git", "add", "."], cwd=work_dir, check=True)
+ subprocess.run(["git", "commit", "-m", commit_message], cwd=work_dir, check=True)
+ subprocess.run(["git", "push"], cwd=work_dir, check=True)
+
+
+def fix_lfs_video_files_tracking(work_dir: Path, lfs_untracked_videos: list[str]) -> None:
+ """
+ HACK: This function fixes the tracking by git lfs which was not properly set on some repos. In that case,
+ there's no other option than to download the actual files and reupload them with lfs tracking.
+ """
+ for i in range(0, len(lfs_untracked_videos), 100):
+ files = lfs_untracked_videos[i : i + 100]
+ try:
+ subprocess.run(["git", "rm", "--cached", *files], cwd=work_dir, capture_output=True, check=True)
+ except subprocess.CalledProcessError as e:
+ print("git rm --cached ERROR:")
+ print(e.stderr)
+ subprocess.run(["git", "add", *files], cwd=work_dir, check=True)
+
+ commit_message = "Track video files with git lfs"
+ subprocess.run(["git", "commit", "-m", commit_message], cwd=work_dir, check=True)
+ subprocess.run(["git", "push"], cwd=work_dir, check=True)
+
+
+def fix_gitattributes(work_dir: Path, current_gittatributes: Path, clean_gittatributes: Path) -> None:
+ shutil.copyfile(clean_gittatributes, current_gittatributes)
+ subprocess.run(["git", "add", ".gitattributes"], cwd=work_dir, check=True)
+ subprocess.run(["git", "commit", "-m", "Fix .gitattributes"], cwd=work_dir, check=True)
+ subprocess.run(["git", "push"], cwd=work_dir, check=True)
+
+
+def _lfs_clone(repo_id: str, work_dir: Path, branch: str) -> None:
+ subprocess.run(["git", "lfs", "install"], cwd=work_dir, check=True)
+ repo_url = f"https://huggingface.co/datasets/{repo_id}"
+ env = {"GIT_LFS_SKIP_SMUDGE": "1"} # Prevent downloading LFS files
+ subprocess.run(
+ ["git", "clone", "--branch", branch, "--single-branch", "--depth", "1", repo_url, str(work_dir)],
+ check=True,
+ env=env,
+ )
+
+
+def _get_lfs_untracked_videos(work_dir: Path, video_files: list[str]) -> list[str]:
+ lfs_tracked_files = subprocess.run(
+ ["git", "lfs", "ls-files", "-n"], cwd=work_dir, capture_output=True, text=True, check=True
+ )
+ lfs_tracked_files = set(lfs_tracked_files.stdout.splitlines())
+ return [f for f in video_files if f not in lfs_tracked_files]
+
+
+def get_videos_info(repo_id: str, local_dir: Path, video_keys: list[str], branch: str) -> dict:
+ # Assumes first episode
+ video_files = [
+ DEFAULT_VIDEO_PATH.format(episode_chunk=0, video_key=vid_key, episode_index=0)
+ for vid_key in video_keys
+ ]
+ hub_api = HfApi()
+ hub_api.snapshot_download(
+ repo_id=repo_id, repo_type="dataset", local_dir=local_dir, revision=branch, allow_patterns=video_files
+ )
+ videos_info_dict = {}
+ for vid_key, vid_path in zip(video_keys, video_files, strict=True):
+ videos_info_dict[vid_key] = get_video_info(local_dir / vid_path)
+
+ return videos_info_dict
+
+
+def convert_dataset(
+ repo_id: str,
+ local_dir: Path,
+ single_task: str | None = None,
+ tasks_path: Path | None = None,
+ tasks_col: Path | None = None,
+ robot_config: dict | None = None,
+ test_branch: str | None = None,
+ **card_kwargs,
+):
+ v1 = get_hub_safe_version(repo_id, V16)
+ v1x_dir = local_dir / V16 / repo_id
+ v20_dir = local_dir / V20 / repo_id
+ v1x_dir.mkdir(parents=True, exist_ok=True)
+ v20_dir.mkdir(parents=True, exist_ok=True)
+
+ hub_api = HfApi()
+ hub_api.snapshot_download(
+ repo_id=repo_id, repo_type="dataset", revision=v1, local_dir=v1x_dir, ignore_patterns="videos*/"
+ )
+ branch = "main"
+ if test_branch:
+ branch = test_branch
+ create_branch(repo_id=repo_id, branch=test_branch, repo_type="dataset")
+
+ metadata_v1 = load_json(v1x_dir / V1_INFO_PATH)
+ dataset = datasets.load_dataset("parquet", data_dir=v1x_dir / "data", split="train")
+ features = get_features_from_hf_dataset(dataset, robot_config)
+ video_keys = [key for key, ft in features.items() if ft["dtype"] == "video"]
+
+ if single_task and "language_instruction" in dataset.column_names:
+ logging.warning(
+ "'single_task' provided but 'language_instruction' tasks_col found. Using 'language_instruction'.",
+ )
+ single_task = None
+ tasks_col = "language_instruction"
+
+ # Episodes & chunks
+ episode_indices = sorted(dataset.unique("episode_index"))
+ total_episodes = len(episode_indices)
+ assert episode_indices == list(range(total_episodes))
+ total_videos = total_episodes * len(video_keys)
+ total_chunks = total_episodes // DEFAULT_CHUNK_SIZE
+ if total_episodes % DEFAULT_CHUNK_SIZE != 0:
+ total_chunks += 1
+
+ # Tasks
+ if single_task:
+ tasks_by_episodes = {ep_idx: single_task for ep_idx in episode_indices}
+ dataset, tasks = add_task_index_by_episodes(dataset, tasks_by_episodes)
+ tasks_by_episodes = {ep_idx: [task] for ep_idx, task in tasks_by_episodes.items()}
+ elif tasks_path:
+ tasks_by_episodes = load_json(tasks_path)
+ tasks_by_episodes = {int(ep_idx): task for ep_idx, task in tasks_by_episodes.items()}
+ dataset, tasks = add_task_index_by_episodes(dataset, tasks_by_episodes)
+ tasks_by_episodes = {ep_idx: [task] for ep_idx, task in tasks_by_episodes.items()}
+ elif tasks_col:
+ dataset, tasks, tasks_by_episodes = add_task_index_from_tasks_col(dataset, tasks_col)
+ else:
+ raise ValueError
+
+ assert set(tasks) == {task for ep_tasks in tasks_by_episodes.values() for task in ep_tasks}
+ tasks = [{"task_index": task_idx, "task": task} for task_idx, task in enumerate(tasks)]
+ write_jsonlines(tasks, v20_dir / TASKS_PATH)
+ features["task_index"] = {
+ "dtype": "int64",
+ "shape": (1,),
+ "names": None,
+ }
+
+ # Videos
+ if video_keys:
+ assert metadata_v1.get("video", False)
+ dataset = dataset.remove_columns(video_keys)
+ clean_gitattr = Path(
+ hub_api.hf_hub_download(
+ repo_id=GITATTRIBUTES_REF, repo_type="dataset", local_dir=local_dir, filename=".gitattributes"
+ )
+ ).absolute()
+ with tempfile.TemporaryDirectory() as tmp_video_dir:
+ move_videos(
+ repo_id, video_keys, total_episodes, total_chunks, Path(tmp_video_dir), clean_gitattr, branch
+ )
+ videos_info = get_videos_info(repo_id, v1x_dir, video_keys=video_keys, branch=branch)
+ for key in video_keys:
+ features[key]["shape"] = (
+ videos_info[key].pop("video.height"),
+ videos_info[key].pop("video.width"),
+ videos_info[key].pop("video.channels"),
+ )
+ features[key]["video_info"] = videos_info[key]
+ assert math.isclose(videos_info[key]["video.fps"], metadata_v1["fps"], rel_tol=1e-3)
+ if "encoding" in metadata_v1:
+ assert videos_info[key]["video.pix_fmt"] == metadata_v1["encoding"]["pix_fmt"]
+ else:
+ assert metadata_v1.get("video", 0) == 0
+ videos_info = None
+
+ # Split data into 1 parquet file by episode
+ episode_lengths = split_parquet_by_episodes(dataset, total_episodes, total_chunks, v20_dir)
+
+ if robot_config is not None:
+ robot_type = robot_config["robot_type"]
+ repo_tags = [robot_type]
+ else:
+ robot_type = "unknown"
+ repo_tags = None
+
+ # Episodes
+ episodes = [
+ {"episode_index": ep_idx, "tasks": tasks_by_episodes[ep_idx], "length": episode_lengths[ep_idx]}
+ for ep_idx in episode_indices
+ ]
+ write_jsonlines(episodes, v20_dir / EPISODES_PATH)
+
+ # Assemble metadata v2.0
+ metadata_v2_0 = {
+ "codebase_version": V20,
+ "robot_type": robot_type,
+ "total_episodes": total_episodes,
+ "total_frames": len(dataset),
+ "total_tasks": len(tasks),
+ "total_videos": total_videos,
+ "total_chunks": total_chunks,
+ "chunks_size": DEFAULT_CHUNK_SIZE,
+ "fps": metadata_v1["fps"],
+ "splits": {"train": f"0:{total_episodes}"},
+ "data_path": DEFAULT_PARQUET_PATH,
+ "video_path": DEFAULT_VIDEO_PATH if video_keys else None,
+ "features": features,
+ }
+ write_json(metadata_v2_0, v20_dir / INFO_PATH)
+ convert_stats_to_json(v1x_dir, v20_dir)
+ card = create_lerobot_dataset_card(tags=repo_tags, dataset_info=metadata_v2_0, **card_kwargs)
+
+ with contextlib.suppress(EntryNotFoundError, HfHubHTTPError):
+ hub_api.delete_folder(repo_id=repo_id, path_in_repo="data", repo_type="dataset", revision=branch)
+
+ with contextlib.suppress(EntryNotFoundError, HfHubHTTPError):
+ hub_api.delete_folder(repo_id=repo_id, path_in_repo="meta_data", repo_type="dataset", revision=branch)
+
+ with contextlib.suppress(EntryNotFoundError, HfHubHTTPError):
+ hub_api.delete_folder(repo_id=repo_id, path_in_repo="meta", repo_type="dataset", revision=branch)
+
+ hub_api.upload_folder(
+ repo_id=repo_id,
+ path_in_repo="data",
+ folder_path=v20_dir / "data",
+ repo_type="dataset",
+ revision=branch,
+ )
+ hub_api.upload_folder(
+ repo_id=repo_id,
+ path_in_repo="meta",
+ folder_path=v20_dir / "meta",
+ repo_type="dataset",
+ revision=branch,
+ )
+
+ card.push_to_hub(repo_id=repo_id, repo_type="dataset", revision=branch)
+
+ if not test_branch:
+ create_branch(repo_id=repo_id, branch=V20, repo_type="dataset")
+
+
+def main():
+ parser = argparse.ArgumentParser()
+ task_args = parser.add_mutually_exclusive_group(required=True)
+
+ parser.add_argument(
+ "--repo-id",
+ type=str,
+ required=True,
+ help="Repository identifier on Hugging Face: a community or a user name `/` the name of the dataset (e.g. `lerobot/pusht`, `cadene/aloha_sim_insertion_human`).",
+ )
+ task_args.add_argument(
+ "--single-task",
+ type=str,
+ help="A short but accurate description of the single task performed in the dataset.",
+ )
+ task_args.add_argument(
+ "--tasks-col",
+ type=str,
+ help="The name of the column containing language instructions",
+ )
+ task_args.add_argument(
+ "--tasks-path",
+ type=Path,
+ help="The path to a .json file containing one language instruction for each episode_index",
+ )
+ parser.add_argument(
+ "--robot-config",
+ type=Path,
+ default=None,
+ help="Path to the robot's config yaml the dataset during conversion.",
+ )
+ parser.add_argument(
+ "--robot-overrides",
+ type=str,
+ nargs="*",
+ help="Any key=value arguments to override the robot config values (use dots for.nested=overrides)",
+ )
+ parser.add_argument(
+ "--local-dir",
+ type=Path,
+ default=None,
+ help="Local directory to store the dataset during conversion. Defaults to /tmp/lerobot_dataset_v2",
+ )
+ parser.add_argument(
+ "--license",
+ type=str,
+ default="apache-2.0",
+ help="Repo license. Must be one of https://huggingface.co/docs/hub/repositories-licenses. Defaults to mit.",
+ )
+ parser.add_argument(
+ "--test-branch",
+ type=str,
+ default=None,
+ help="Repo branch to test your conversion first (e.g. 'v2.0.test')",
+ )
+
+ args = parser.parse_args()
+ if not args.local_dir:
+ args.local_dir = Path("/tmp/lerobot_dataset_v2")
+
+ robot_config = parse_robot_config(args.robot_config, args.robot_overrides) if args.robot_config else None
+ del args.robot_config, args.robot_overrides
+
+ convert_dataset(**vars(args), robot_config=robot_config)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/lerobot/common/datasets/video_utils.py b/lerobot/common/datasets/video_utils.py
index 4d4ac6b0a..8ed3318dd 100644
--- a/lerobot/common/datasets/video_utils.py
+++ b/lerobot/common/datasets/video_utils.py
@@ -13,6 +13,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+import json
import logging
import subprocess
import warnings
@@ -25,47 +26,11 @@
import torch
import torchvision
from datasets.features.features import register_feature
-
-
-def load_from_videos(
- item: dict[str, torch.Tensor],
- video_frame_keys: list[str],
- videos_dir: Path,
- tolerance_s: float,
- backend: str = "pyav",
-):
- """Note: When using data workers (e.g. DataLoader with num_workers>0), do not call this function
- in the main process (e.g. by using a second Dataloader with num_workers=0). It will result in a Segmentation Fault.
- This probably happens because a memory reference to the video loader is created in the main process and a
- subprocess fails to access it.
- """
- # since video path already contains "videos" (e.g. videos_dir="data/videos", path="videos/episode_0.mp4")
- data_dir = videos_dir.parent
-
- for key in video_frame_keys:
- if isinstance(item[key], list):
- # load multiple frames at once (expected when delta_timestamps is not None)
- timestamps = [frame["timestamp"] for frame in item[key]]
- paths = [frame["path"] for frame in item[key]]
- if len(set(paths)) > 1:
- raise NotImplementedError("All video paths are expected to be the same for now.")
- video_path = data_dir / paths[0]
-
- frames = decode_video_frames_torchvision(video_path, timestamps, tolerance_s, backend)
- item[key] = frames
- else:
- # load one frame
- timestamps = [item[key]["timestamp"]]
- video_path = data_dir / item[key]["path"]
-
- frames = decode_video_frames_torchvision(video_path, timestamps, tolerance_s, backend)
- item[key] = frames[0]
-
- return item
+from PIL import Image
def decode_video_frames_torchvision(
- video_path: str,
+ video_path: Path | str,
timestamps: list[float],
tolerance_s: float,
backend: str = "pyav",
@@ -163,8 +128,8 @@ def decode_video_frames_torchvision(
def encode_video_frames(
- imgs_dir: Path,
- video_path: Path,
+ imgs_dir: Path | str,
+ video_path: Path | str,
fps: int,
vcodec: str = "libsvtav1",
pix_fmt: str = "yuv420p",
@@ -247,3 +212,104 @@ def __call__(self):
)
# to make VideoFrame available in HuggingFace `datasets`
register_feature(VideoFrame, "VideoFrame")
+
+
+def get_audio_info(video_path: Path | str) -> dict:
+ ffprobe_audio_cmd = [
+ "ffprobe",
+ "-v",
+ "error",
+ "-select_streams",
+ "a:0",
+ "-show_entries",
+ "stream=channels,codec_name,bit_rate,sample_rate,bit_depth,channel_layout,duration",
+ "-of",
+ "json",
+ str(video_path),
+ ]
+ result = subprocess.run(ffprobe_audio_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
+ if result.returncode != 0:
+ raise RuntimeError(f"Error running ffprobe: {result.stderr}")
+
+ info = json.loads(result.stdout)
+ audio_stream_info = info["streams"][0] if info.get("streams") else None
+ if audio_stream_info is None:
+ return {"has_audio": False}
+
+ # Return the information, defaulting to None if no audio stream is present
+ return {
+ "has_audio": True,
+ "audio.channels": audio_stream_info.get("channels", None),
+ "audio.codec": audio_stream_info.get("codec_name", None),
+ "audio.bit_rate": int(audio_stream_info["bit_rate"]) if audio_stream_info.get("bit_rate") else None,
+ "audio.sample_rate": int(audio_stream_info["sample_rate"])
+ if audio_stream_info.get("sample_rate")
+ else None,
+ "audio.bit_depth": audio_stream_info.get("bit_depth", None),
+ "audio.channel_layout": audio_stream_info.get("channel_layout", None),
+ }
+
+
+def get_video_info(video_path: Path | str) -> dict:
+ ffprobe_video_cmd = [
+ "ffprobe",
+ "-v",
+ "error",
+ "-select_streams",
+ "v:0",
+ "-show_entries",
+ "stream=r_frame_rate,width,height,codec_name,nb_frames,duration,pix_fmt",
+ "-of",
+ "json",
+ str(video_path),
+ ]
+ result = subprocess.run(ffprobe_video_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
+ if result.returncode != 0:
+ raise RuntimeError(f"Error running ffprobe: {result.stderr}")
+
+ info = json.loads(result.stdout)
+ video_stream_info = info["streams"][0]
+
+ # Calculate fps from r_frame_rate
+ r_frame_rate = video_stream_info["r_frame_rate"]
+ num, denom = map(int, r_frame_rate.split("/"))
+ fps = num / denom
+
+ pixel_channels = get_video_pixel_channels(video_stream_info["pix_fmt"])
+
+ video_info = {
+ "video.fps": fps,
+ "video.height": video_stream_info["height"],
+ "video.width": video_stream_info["width"],
+ "video.channels": pixel_channels,
+ "video.codec": video_stream_info["codec_name"],
+ "video.pix_fmt": video_stream_info["pix_fmt"],
+ "video.is_depth_map": False,
+ **get_audio_info(video_path),
+ }
+
+ return video_info
+
+
+def get_video_pixel_channels(pix_fmt: str) -> int:
+ if "gray" in pix_fmt or "depth" in pix_fmt or "monochrome" in pix_fmt:
+ return 1
+ elif "rgba" in pix_fmt or "yuva" in pix_fmt:
+ return 4
+ elif "rgb" in pix_fmt or "yuv" in pix_fmt:
+ return 3
+ else:
+ raise ValueError("Unknown format")
+
+
+def get_image_pixel_channels(image: Image):
+ if image.mode == "L":
+ return 1 # Grayscale
+ elif image.mode == "LA":
+ return 2 # Grayscale + Alpha
+ elif image.mode == "RGB":
+ return 3 # RGB
+ elif image.mode == "RGBA":
+ return 4 # RGBA
+ else:
+ raise ValueError("Unknown format")
diff --git a/lerobot/common/envs/factory.py b/lerobot/common/envs/factory.py
index 54f24ea84..71e913cd9 100644
--- a/lerobot/common/envs/factory.py
+++ b/lerobot/common/envs/factory.py
@@ -19,7 +19,7 @@
from omegaconf import DictConfig
-def make_env(cfg: DictConfig, n_envs: int | None = None) -> gym.vector.VectorEnv | None:
+def make_env(cfg: DictConfig, n_envs: int | None = None, out_dir: str = "") -> gym.vector.VectorEnv | None:
"""Makes a gym vector environment according to the evaluation config.
n_envs can be used to override eval.batch_size in the configuration. Must be at least 1.
diff --git a/lerobot/common/logger.py b/lerobot/common/logger.py
index bf578fcc5..06bc7eb65 100644
--- a/lerobot/common/logger.py
+++ b/lerobot/common/logger.py
@@ -189,7 +189,7 @@ def save_training_state(
training_state["scheduler"] = scheduler.state_dict()
torch.save(training_state, save_dir / self.training_state_file_name)
- def save_checkpont(
+ def save_checkpoint(
self,
train_step: int,
policy: Policy,
@@ -208,6 +208,8 @@ def save_checkpont(
checkpoint_dir / self.pretrained_model_dir_name, policy, wandb_artifact_name=wandb_artifact_name
)
self.save_training_state(checkpoint_dir, train_step, optimizer, scheduler)
+ if self.last_checkpoint_dir.exists() or self.last_checkpoint_dir.is_symlink():
+ self.last_checkpoint_dir.unlink() # Remove the existing symlink or file
os.symlink(checkpoint_dir.absolute(), self.last_checkpoint_dir)
def load_last_training_state(self, optimizer: Optimizer, scheduler: LRScheduler | None) -> int:
diff --git a/lerobot/common/policies/diffusion/configuration_diffusion.py b/lerobot/common/policies/diffusion/configuration_diffusion.py
index bd3692ace..531f49e4d 100644
--- a/lerobot/common/policies/diffusion/configuration_diffusion.py
+++ b/lerobot/common/policies/diffusion/configuration_diffusion.py
@@ -67,6 +67,7 @@ class DiffusionConfig:
use_group_norm: Whether to replace batch normalization with group normalization in the backbone.
The group sizes are set to be about 16 (to be precise, feature_dim // 16).
spatial_softmax_num_keypoints: Number of keypoints for SpatialSoftmax.
+ use_separate_rgb_encoders_per_camera: Whether to use a separate RGB encoder for each camera view.
down_dims: Feature dimension for each stage of temporal downsampling in the diffusion modeling Unet.
You may provide a variable number of dimensions, therefore also controlling the degree of
downsampling.
@@ -130,6 +131,7 @@ class DiffusionConfig:
pretrained_backbone_weights: str | None = None
use_group_norm: bool = True
spatial_softmax_num_keypoints: int = 32
+ use_separate_rgb_encoder_per_camera: bool = False
# Unet.
down_dims: tuple[int, ...] = (512, 1024, 2048)
kernel_size: int = 5
diff --git a/lerobot/common/policies/diffusion/modeling_diffusion.py b/lerobot/common/policies/diffusion/modeling_diffusion.py
index 308a8be3c..9ba562600 100644
--- a/lerobot/common/policies/diffusion/modeling_diffusion.py
+++ b/lerobot/common/policies/diffusion/modeling_diffusion.py
@@ -182,8 +182,13 @@ def __init__(self, config: DiffusionConfig):
self._use_env_state = False
if num_images > 0:
self._use_images = True
- self.rgb_encoder = DiffusionRgbEncoder(config)
- global_cond_dim += self.rgb_encoder.feature_dim * num_images
+ if self.config.use_separate_rgb_encoder_per_camera:
+ encoders = [DiffusionRgbEncoder(config) for _ in range(num_images)]
+ self.rgb_encoder = nn.ModuleList(encoders)
+ global_cond_dim += encoders[0].feature_dim * num_images
+ else:
+ self.rgb_encoder = DiffusionRgbEncoder(config)
+ global_cond_dim += self.rgb_encoder.feature_dim * num_images
if "observation.environment_state" in config.input_shapes:
self._use_env_state = True
global_cond_dim += config.input_shapes["observation.environment_state"][0]
@@ -239,16 +244,32 @@ def _prepare_global_conditioning(self, batch: dict[str, Tensor]) -> Tensor:
"""Encode image features and concatenate them all together along with the state vector."""
batch_size, n_obs_steps = batch["observation.state"].shape[:2]
global_cond_feats = [batch["observation.state"]]
- # Extract image feature (first combine batch, sequence, and camera index dims).
+ # Extract image features.
if self._use_images:
- img_features = self.rgb_encoder(
- einops.rearrange(batch["observation.images"], "b s n ... -> (b s n) ...")
- )
- # Separate batch dim and sequence dim back out. The camera index dim gets absorbed into the
- # feature dim (effectively concatenating the camera features).
- img_features = einops.rearrange(
- img_features, "(b s n) ... -> b s (n ...)", b=batch_size, s=n_obs_steps
- )
+ if self.config.use_separate_rgb_encoder_per_camera:
+ # Combine batch and sequence dims while rearranging to make the camera index dimension first.
+ images_per_camera = einops.rearrange(batch["observation.images"], "b s n ... -> n (b s) ...")
+ img_features_list = torch.cat(
+ [
+ encoder(images)
+ for encoder, images in zip(self.rgb_encoder, images_per_camera, strict=True)
+ ]
+ )
+ # Separate batch and sequence dims back out. The camera index dim gets absorbed into the
+ # feature dim (effectively concatenating the camera features).
+ img_features = einops.rearrange(
+ img_features_list, "(n b s) ... -> b s (n ...)", b=batch_size, s=n_obs_steps
+ )
+ else:
+ # Combine batch, sequence, and "which camera" dims before passing to shared encoder.
+ img_features = self.rgb_encoder(
+ einops.rearrange(batch["observation.images"], "b s n ... -> (b s n) ...")
+ )
+ # Separate batch dim and sequence dim back out. The camera index dim gets absorbed into the
+ # feature dim (effectively concatenating the camera features).
+ img_features = einops.rearrange(
+ img_features, "(b s n) ... -> b s (n ...)", b=batch_size, s=n_obs_steps
+ )
global_cond_feats.append(img_features)
if self._use_env_state:
diff --git a/lerobot/common/robot_devices/cameras/intelrealsense.py b/lerobot/common/robot_devices/cameras/intelrealsense.py
index 4806bf785..84ac540f2 100644
--- a/lerobot/common/robot_devices/cameras/intelrealsense.py
+++ b/lerobot/common/robot_devices/cameras/intelrealsense.py
@@ -5,80 +5,99 @@
import argparse
import concurrent.futures
import logging
+import math
import shutil
import threading
import time
import traceback
+from collections import Counter
from dataclasses import dataclass, replace
from pathlib import Path
from threading import Thread
-import cv2
import numpy as np
-import pyrealsense2 as rs
from PIL import Image
from lerobot.common.robot_devices.utils import (
RobotDeviceAlreadyConnectedError,
RobotDeviceNotConnectedError,
+ busy_wait,
)
from lerobot.common.utils.utils import capture_timestamp_utc
-from lerobot.scripts.control_robot import busy_wait
SERIAL_NUMBER_INDEX = 1
-def find_camera_indices(raise_when_empty=True) -> list[int]:
+def find_cameras(raise_when_empty=True, mock=False) -> list[dict]:
"""
- Find the serial numbers of the Intel RealSense cameras
+ Find the names and the serial numbers of the Intel RealSense cameras
connected to the computer.
"""
- camera_ids = []
+ if mock:
+ import tests.mock_pyrealsense2 as rs
+ else:
+ import pyrealsense2 as rs
+
+ cameras = []
for device in rs.context().query_devices():
serial_number = int(device.get_info(rs.camera_info(SERIAL_NUMBER_INDEX)))
- camera_ids.append(serial_number)
+ name = device.get_info(rs.camera_info.name)
+ cameras.append(
+ {
+ "serial_number": serial_number,
+ "name": name,
+ }
+ )
- if raise_when_empty and len(camera_ids) == 0:
+ if raise_when_empty and len(cameras) == 0:
raise OSError(
"Not a single camera was detected. Try re-plugging, or re-installing `librealsense` and its python wrapper `pyrealsense2`, or updating the firmware."
)
- return camera_ids
+ return cameras
-def save_image(img_array, camera_idx, frame_index, images_dir):
+def save_image(img_array, serial_number, frame_index, images_dir):
try:
img = Image.fromarray(img_array)
- path = images_dir / f"camera_{camera_idx}_frame_{frame_index:06d}.png"
+ path = images_dir / f"camera_{serial_number}_frame_{frame_index:06d}.png"
path.parent.mkdir(parents=True, exist_ok=True)
img.save(str(path), quality=100)
logging.info(f"Saved image: {path}")
except Exception as e:
- logging.error(f"Failed to save image for camera {camera_idx} frame {frame_index}: {e}")
+ logging.error(f"Failed to save image for camera {serial_number} frame {frame_index}: {e}")
def save_images_from_cameras(
images_dir: Path,
- camera_ids: list[int] | None = None,
+ serial_numbers: list[int] | None = None,
fps=None,
width=None,
height=None,
record_time_s=2,
+ mock=False,
):
"""
Initializes all the cameras and saves images to the directory. Useful to visually identify the camera
- associated to a given camera index.
+ associated to a given serial number.
"""
- if camera_ids is None:
- camera_ids = find_camera_indices()
+ if serial_numbers is None or len(serial_numbers) == 0:
+ camera_infos = find_cameras(mock=mock)
+ serial_numbers = [cam["serial_number"] for cam in camera_infos]
+
+ if mock:
+ import tests.mock_cv2 as cv2
+ else:
+ import cv2
print("Connecting cameras")
cameras = []
- for cam_idx in camera_ids:
- camera = IntelRealSenseCamera(cam_idx, fps=fps, width=width, height=height)
+ for cam_sn in serial_numbers:
+ print(f"{cam_sn=}")
+ camera = IntelRealSenseCamera(cam_sn, fps=fps, width=width, height=height, mock=mock)
camera.connect()
print(
- f"IntelRealSenseCamera({camera.camera_index}, fps={camera.fps}, width={camera.width}, height={camera.height}, color_mode={camera.color_mode})"
+ f"IntelRealSenseCamera({camera.serial_number}, fps={camera.fps}, width={camera.width}, height={camera.height}, color_mode={camera.color_mode})"
)
cameras.append(camera)
@@ -93,7 +112,7 @@ def save_images_from_cameras(
frame_index = 0
start_time = time.perf_counter()
try:
- with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
+ with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
while True:
now = time.perf_counter()
@@ -103,12 +122,13 @@ def save_images_from_cameras(
image = camera.read() if fps is None else camera.async_read()
if image is None:
print("No Frame")
+
bgr_converted_image = cv2.cvtColor(image, cv2.COLOR_RGB2BGR)
executor.submit(
save_image,
bgr_converted_image,
- camera.camera_index,
+ camera.serial_number,
frame_index,
images_dir,
)
@@ -140,6 +160,7 @@ class IntelRealSenseCameraConfig:
IntelRealSenseCameraConfig(90, 640, 480)
IntelRealSenseCameraConfig(30, 1280, 720)
IntelRealSenseCameraConfig(30, 640, 480, use_depth=True)
+ IntelRealSenseCameraConfig(30, 640, 480, rotation=90)
```
"""
@@ -147,8 +168,11 @@ class IntelRealSenseCameraConfig:
width: int | None = None
height: int | None = None
color_mode: str = "rgb"
+ channels: int | None = None
use_depth: bool = False
force_hardware_reset: bool = True
+ rotation: int | None = None
+ mock: bool = False
def __post_init__(self):
if self.color_mode not in ["rgb", "bgr"]:
@@ -156,19 +180,25 @@ def __post_init__(self):
f"`color_mode` is expected to be 'rgb' or 'bgr', but {self.color_mode} is provided."
)
- if (self.fps or self.width or self.height) and not (self.fps and self.width and self.height):
+ self.channels = 3
+
+ at_least_one_is_not_none = self.fps is not None or self.width is not None or self.height is not None
+ at_least_one_is_none = self.fps is None or self.width is None or self.height is None
+ if at_least_one_is_not_none and at_least_one_is_none:
raise ValueError(
"For `fps`, `width` and `height`, either all of them need to be set, or none of them, "
f"but {self.fps=}, {self.width=}, {self.height=} were provided."
)
+ if self.rotation not in [-90, None, 90, 180]:
+ raise ValueError(f"`rotation` must be in [-90, None, 90, 180] (got {self.rotation})")
+
class IntelRealSenseCamera:
"""
The IntelRealSenseCamera class is similar to OpenCVCamera class but adds additional features for Intel Real Sense cameras:
- - camera_index corresponds to the serial number of the camera,
- - camera_index won't randomly change as it can be the case of OpenCVCamera for Linux,
- - read is more reliable than OpenCVCamera,
+ - is instantiated with the serial number of the camera - won't randomly change as it can be the case of OpenCVCamera for Linux,
+ - can also be instantiated with the camera's name — if it's unique — using IntelRealSenseCamera.init_from_name(),
- depth map can be returned.
To find the camera indices of your cameras, you can run our utility script that will save a few frames for each camera:
@@ -181,8 +211,10 @@ class IntelRealSenseCamera:
Example of usage:
```python
- camera_index = 128422271347
- camera = IntelRealSenseCamera(camera_index)
+ # Instantiate with its serial number
+ camera = IntelRealSenseCamera(128422271347)
+ # Or by its name if it's unique
+ camera = IntelRealSenseCamera.init_from_name("Intel RealSense D405")
camera.connect()
color_image = camera.read()
# when done using the camera, consider disconnecting
@@ -191,19 +223,19 @@ class IntelRealSenseCamera:
Example of changing default fps, width, height and color_mode:
```python
- camera = IntelRealSenseCamera(camera_index, fps=30, width=1280, height=720)
+ camera = IntelRealSenseCamera(serial_number, fps=30, width=1280, height=720)
camera = connect() # applies the settings, might error out if these settings are not compatible with the camera
- camera = IntelRealSenseCamera(camera_index, fps=90, width=640, height=480)
+ camera = IntelRealSenseCamera(serial_number, fps=90, width=640, height=480)
camera = connect()
- camera = IntelRealSenseCamera(camera_index, fps=90, width=640, height=480, color_mode="bgr")
+ camera = IntelRealSenseCamera(serial_number, fps=90, width=640, height=480, color_mode="bgr")
camera = connect()
```
Example of returning depth:
```python
- camera = IntelRealSenseCamera(camera_index, use_depth=True)
+ camera = IntelRealSenseCamera(serial_number, use_depth=True)
camera.connect()
color_image, depth_map = camera.read()
```
@@ -211,7 +243,7 @@ class IntelRealSenseCamera:
def __init__(
self,
- camera_index: int,
+ serial_number: int,
config: IntelRealSenseCameraConfig | None = None,
**kwargs,
):
@@ -221,13 +253,15 @@ def __init__(
# Overwrite the config arguments using kwargs
config = replace(config, **kwargs)
- self.camera_index = camera_index
+ self.serial_number = serial_number
self.fps = config.fps
self.width = config.width
self.height = config.height
+ self.channels = config.channels
self.color_mode = config.color_mode
self.use_depth = config.use_depth
self.force_hardware_reset = config.force_hardware_reset
+ self.mock = config.mock
self.camera = None
self.is_connected = False
@@ -237,14 +271,55 @@ def __init__(
self.depth_map = None
self.logs = {}
+ if self.mock:
+ import tests.mock_cv2 as cv2
+ else:
+ import cv2
+
+ # TODO(alibets): Do we keep original width/height or do we define them after rotation?
+ self.rotation = None
+ if config.rotation == -90:
+ self.rotation = cv2.ROTATE_90_COUNTERCLOCKWISE
+ elif config.rotation == 90:
+ self.rotation = cv2.ROTATE_90_CLOCKWISE
+ elif config.rotation == 180:
+ self.rotation = cv2.ROTATE_180
+
+ @classmethod
+ def init_from_name(cls, name: str, config: IntelRealSenseCameraConfig | None = None, **kwargs):
+ camera_infos = find_cameras()
+ camera_names = [cam["name"] for cam in camera_infos]
+ this_name_count = Counter(camera_names)[name]
+ if this_name_count > 1:
+ # TODO(aliberts): Test this with multiple identical cameras (Aloha)
+ raise ValueError(
+ f"Multiple {name} cameras have been detected. Please use their serial number to instantiate them."
+ )
+
+ name_to_serial_dict = {cam["name"]: cam["serial_number"] for cam in camera_infos}
+ cam_sn = name_to_serial_dict[name]
+
+ if config is None:
+ config = IntelRealSenseCameraConfig()
+
+ # Overwrite the config arguments using kwargs
+ config = replace(config, **kwargs)
+
+ return cls(serial_number=cam_sn, config=config, **kwargs)
+
def connect(self):
if self.is_connected:
raise RobotDeviceAlreadyConnectedError(
- f"IntelRealSenseCamera({self.camera_index}) is already connected."
+ f"IntelRealSenseCamera({self.serial_number}) is already connected."
)
+ if self.mock:
+ import tests.mock_pyrealsense2 as rs
+ else:
+ import pyrealsense2 as rs
+
config = rs.config()
- config.enable_device(str(self.camera_index))
+ config.enable_device(str(self.serial_number))
if self.fps and self.width and self.height:
# TODO(rcadene): can we set rgb8 directly?
@@ -260,7 +335,7 @@ def connect(self):
self.camera = rs.pipeline()
try:
- self.camera.start(config)
+ profile = self.camera.start(config)
is_camera_open = True
except RuntimeError:
is_camera_open = False
@@ -269,15 +344,41 @@ def connect(self):
# If the camera doesn't work, display the camera indices corresponding to
# valid cameras.
if not is_camera_open:
- # Verify that the provided `camera_index` is valid before printing the traceback
- available_cam_ids = find_camera_indices()
- if self.camera_index not in available_cam_ids:
+ # Verify that the provided `serial_number` is valid before printing the traceback
+ camera_infos = find_cameras()
+ serial_numbers = [cam["serial_number"] for cam in camera_infos]
+ if self.serial_number not in serial_numbers:
raise ValueError(
- f"`camera_index` is expected to be one of these available cameras {available_cam_ids}, but {self.camera_index} is provided instead. "
- "To find the camera index you should use, run `python lerobot/common/robot_devices/cameras/intelrealsense.py`."
+ f"`serial_number` is expected to be one of these available cameras {serial_numbers}, but {self.serial_number} is provided instead. "
+ "To find the serial number you should use, run `python lerobot/common/robot_devices/cameras/intelrealsense.py`."
)
- raise OSError(f"Can't access IntelRealSenseCamera({self.camera_index}).")
+ raise OSError(f"Can't access IntelRealSenseCamera({self.serial_number}).")
+
+ color_stream = profile.get_stream(rs.stream.color)
+ color_profile = color_stream.as_video_stream_profile()
+ actual_fps = color_profile.fps()
+ actual_width = color_profile.width()
+ actual_height = color_profile.height()
+
+ # Using `math.isclose` since actual fps can be a float (e.g. 29.9 instead of 30)
+ if self.fps is not None and not math.isclose(self.fps, actual_fps, rel_tol=1e-3):
+ # Using `OSError` since it's a broad that encompasses issues related to device communication
+ raise OSError(
+ f"Can't set {self.fps=} for IntelRealSenseCamera({self.serial_number}). Actual value is {actual_fps}."
+ )
+ if self.width is not None and self.width != actual_width:
+ raise OSError(
+ f"Can't set {self.width=} for IntelRealSenseCamera({self.serial_number}). Actual value is {actual_width}."
+ )
+ if self.height is not None and self.height != actual_height:
+ raise OSError(
+ f"Can't set {self.height=} for IntelRealSenseCamera({self.serial_number}). Actual value is {actual_height}."
+ )
+
+ self.fps = round(actual_fps)
+ self.width = round(actual_width)
+ self.height = round(actual_height)
self.is_connected = True
@@ -293,9 +394,14 @@ def read(self, temporary_color: str | None = None) -> np.ndarray | tuple[np.ndar
"""
if not self.is_connected:
raise RobotDeviceNotConnectedError(
- f"IntelRealSenseCamera({self.camera_index}) is not connected. Try running `camera.connect()` first."
+ f"IntelRealSenseCamera({self.serial_number}) is not connected. Try running `camera.connect()` first."
)
+ if self.mock:
+ import tests.mock_cv2 as cv2
+ else:
+ import cv2
+
start_time = time.perf_counter()
frame = self.camera.wait_for_frames(timeout_ms=5000)
@@ -303,7 +409,7 @@ def read(self, temporary_color: str | None = None) -> np.ndarray | tuple[np.ndar
color_frame = frame.get_color_frame()
if not color_frame:
- raise OSError(f"Can't capture color image from IntelRealSenseCamera({self.camera_index}).")
+ raise OSError(f"Can't capture color image from IntelRealSenseCamera({self.serial_number}).")
color_image = np.asanyarray(color_frame.get_data())
@@ -323,6 +429,9 @@ def read(self, temporary_color: str | None = None) -> np.ndarray | tuple[np.ndar
f"Can't capture color image with expected height and width ({self.height} x {self.width}). ({h} x {w}) returned instead."
)
+ if self.rotation is not None:
+ color_image = cv2.rotate(color_image, self.rotation)
+
# log the number of seconds it took to read the image
self.logs["delta_timestamp_s"] = time.perf_counter() - start_time
@@ -332,7 +441,7 @@ def read(self, temporary_color: str | None = None) -> np.ndarray | tuple[np.ndar
if self.use_depth:
depth_frame = frame.get_depth_frame()
if not depth_frame:
- raise OSError(f"Can't capture depth image from IntelRealSenseCamera({self.camera_index}).")
+ raise OSError(f"Can't capture depth image from IntelRealSenseCamera({self.serial_number}).")
depth_map = np.asanyarray(depth_frame.get_data())
@@ -342,12 +451,15 @@ def read(self, temporary_color: str | None = None) -> np.ndarray | tuple[np.ndar
f"Can't capture depth map with expected height and width ({self.height} x {self.width}). ({h} x {w}) returned instead."
)
+ if self.rotation is not None:
+ depth_map = cv2.rotate(depth_map, self.rotation)
+
return color_image, depth_map
else:
return color_image
def read_loop(self):
- while self.stop_event is None or not self.stop_event.is_set():
+ while not self.stop_event.is_set():
if self.use_depth:
self.color_image, self.depth_map = self.read()
else:
@@ -357,7 +469,7 @@ def async_read(self):
"""Access the latest color image"""
if not self.is_connected:
raise RobotDeviceNotConnectedError(
- f"IntelRealSenseCamera({self.camera_index}) is not connected. Try running `camera.connect()` first."
+ f"IntelRealSenseCamera({self.serial_number}) is not connected. Try running `camera.connect()` first."
)
if self.thread is None:
@@ -368,6 +480,7 @@ def async_read(self):
num_tries = 0
while self.color_image is None:
+ # TODO(rcadene, aliberts): intelrealsense has diverged compared to opencv over here
num_tries += 1
time.sleep(1 / self.fps)
if num_tries > self.fps and (self.thread.ident is None or not self.thread.is_alive()):
@@ -383,7 +496,7 @@ def async_read(self):
def disconnect(self):
if not self.is_connected:
raise RobotDeviceNotConnectedError(
- f"IntelRealSenseCamera({self.camera_index}) is not connected. Try running `camera.connect()` first."
+ f"IntelRealSenseCamera({self.serial_number}) is not connected. Try running `camera.connect()` first."
)
if self.thread is not None and self.thread.is_alive():
@@ -408,11 +521,11 @@ def __del__(self):
description="Save a few frames using `IntelRealSenseCamera` for all cameras connected to the computer, or a selected subset."
)
parser.add_argument(
- "--camera-ids",
+ "--serial-numbers",
type=int,
nargs="*",
default=None,
- help="List of camera indices used to instantiate the `IntelRealSenseCamera`. If not provided, find and use all available camera indices.",
+ help="List of serial numbers used to instantiate the `IntelRealSenseCamera`. If not provided, find and use all available camera indices.",
)
parser.add_argument(
"--fps",
diff --git a/lerobot/common/robot_devices/cameras/opencv.py b/lerobot/common/robot_devices/cameras/opencv.py
index b066a451a..d284cf55a 100644
--- a/lerobot/common/robot_devices/cameras/opencv.py
+++ b/lerobot/common/robot_devices/cameras/opencv.py
@@ -13,7 +13,6 @@
from pathlib import Path
from threading import Thread
-import cv2
import numpy as np
from PIL import Image
@@ -24,10 +23,6 @@
)
from lerobot.common.utils.utils import capture_timestamp_utc
-# Use 1 thread to avoid blocking the main thread. Especially useful during data collection
-# when other threads are used to save the images.
-cv2.setNumThreads(1)
-
# The maximum opencv device index depends on your operating system. For instance,
# if you have 3 cameras, they should be associated to index 0, 1, and 2. This is the case
# on MacOS. However, on Ubuntu, the indices are different like 6, 16, 23.
@@ -36,20 +31,44 @@
MAX_OPENCV_INDEX = 60
-def find_camera_indices(raise_when_empty=False, max_index_search_range=MAX_OPENCV_INDEX):
+def find_cameras(raise_when_empty=False, max_index_search_range=MAX_OPENCV_INDEX, mock=False) -> list[dict]:
+ cameras = []
if platform.system() == "Linux":
- # Linux uses camera ports
print("Linux detected. Finding available camera indices through scanning '/dev/video*' ports")
- possible_camera_ids = []
- for port in Path("/dev").glob("video*"):
- camera_idx = int(str(port).replace("/dev/video", ""))
- possible_camera_ids.append(camera_idx)
+ possible_ports = [str(port) for port in Path("/dev").glob("video*")]
+ ports = _find_cameras(possible_ports, mock=mock)
+ for port in ports:
+ cameras.append(
+ {
+ "port": port,
+ "index": int(port.removeprefix("/dev/video")),
+ }
+ )
else:
print(
"Mac or Windows detected. Finding available camera indices through "
f"scanning all indices from 0 to {MAX_OPENCV_INDEX}"
)
- possible_camera_ids = range(max_index_search_range)
+ possible_indices = range(max_index_search_range)
+ indices = _find_cameras(possible_indices, mock=mock)
+ for index in indices:
+ cameras.append(
+ {
+ "port": None,
+ "index": index,
+ }
+ )
+
+ return cameras
+
+
+def _find_cameras(
+ possible_camera_ids: list[int | str], raise_when_empty=False, mock=False
+) -> list[int | str]:
+ if mock:
+ import tests.mock_cv2 as cv2
+ else:
+ import cv2
camera_ids = []
for camera_idx in possible_camera_ids:
@@ -70,6 +89,16 @@ def find_camera_indices(raise_when_empty=False, max_index_search_range=MAX_OPENC
return camera_ids
+def is_valid_unix_path(path: str) -> bool:
+ """Note: if 'path' points to a symlink, this will return True only if the target exists"""
+ p = Path(path)
+ return p.is_absolute() and p.exists()
+
+
+def get_camera_index_from_unix_port(port: Path) -> int:
+ return int(str(port.resolve()).removeprefix("/dev/video"))
+
+
def save_image(img_array, camera_index, frame_index, images_dir):
img = Image.fromarray(img_array)
path = images_dir / f"camera_{camera_index:02d}_frame_{frame_index:06d}.png"
@@ -78,19 +107,26 @@ def save_image(img_array, camera_index, frame_index, images_dir):
def save_images_from_cameras(
- images_dir: Path, camera_ids: list[int] | None = None, fps=None, width=None, height=None, record_time_s=2
+ images_dir: Path,
+ camera_ids: list | None = None,
+ fps=None,
+ width=None,
+ height=None,
+ record_time_s=2,
+ mock=False,
):
"""
Initializes all the cameras and saves images to the directory. Useful to visually identify the camera
associated to a given camera index.
"""
- if camera_ids is None:
- camera_ids = find_camera_indices()
+ if camera_ids is None or len(camera_ids) == 0:
+ camera_infos = find_cameras(mock=mock)
+ camera_ids = [cam["index"] for cam in camera_infos]
print("Connecting cameras")
cameras = []
for cam_idx in camera_ids:
- camera = OpenCVCamera(cam_idx, fps=fps, width=width, height=height)
+ camera = OpenCVCamera(cam_idx, fps=fps, width=width, height=height, mock=mock)
camera.connect()
print(
f"OpenCVCamera({camera.camera_index}, fps={camera.fps}, width={camera.width}, "
@@ -108,7 +144,7 @@ def save_images_from_cameras(
print(f"Saving images to {images_dir}")
frame_index = 0
start_time = time.perf_counter()
- with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
+ with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
while True:
now = time.perf_counter()
@@ -129,11 +165,11 @@ def save_images_from_cameras(
dt_s = time.perf_counter() - now
busy_wait(1 / fps - dt_s)
+ print(f"Frame: {frame_index:04d}\tLatency (ms): {(time.perf_counter() - now) * 1000:.2f}")
+
if time.perf_counter() - start_time > record_time_s:
break
- print(f"Frame: {frame_index:04d}\tLatency (ms): {(time.perf_counter() - now) * 1000:.2f}")
-
frame_index += 1
print(f"Images have been saved to {images_dir}")
@@ -156,6 +192,9 @@ class OpenCVCameraConfig:
width: int | None = None
height: int | None = None
color_mode: str = "rgb"
+ channels: int | None = None
+ rotation: int | None = None
+ mock: bool = False
def __post_init__(self):
if self.color_mode not in ["rgb", "bgr"]:
@@ -163,6 +202,11 @@ def __post_init__(self):
f"`color_mode` is expected to be 'rgb' or 'bgr', but {self.color_mode} is provided."
)
+ self.channels = 3
+
+ if self.rotation not in [-90, None, 90, 180]:
+ raise ValueError(f"`rotation` must be in [-90, None, 90, 180] (got {self.rotation})")
+
class OpenCVCamera:
"""
@@ -203,7 +247,7 @@ class OpenCVCamera:
```
"""
- def __init__(self, camera_index: int, config: OpenCVCameraConfig | None = None, **kwargs):
+ def __init__(self, camera_index: int | str, config: OpenCVCameraConfig | None = None, **kwargs):
if config is None:
config = OpenCVCameraConfig()
@@ -211,10 +255,25 @@ def __init__(self, camera_index: int, config: OpenCVCameraConfig | None = None,
config = replace(config, **kwargs)
self.camera_index = camera_index
+ self.port = None
+
+ # Linux uses ports for connecting to cameras
+ if platform.system() == "Linux":
+ if isinstance(self.camera_index, int):
+ self.port = Path(f"/dev/video{self.camera_index}")
+ elif isinstance(self.camera_index, str) and is_valid_unix_path(self.camera_index):
+ self.port = Path(self.camera_index)
+ # Retrieve the camera index from a potentially symlinked path
+ self.camera_index = get_camera_index_from_unix_port(self.port)
+ else:
+ raise ValueError(f"Please check the provided camera_index: {camera_index}")
+
self.fps = config.fps
self.width = config.width
self.height = config.height
+ self.channels = config.channels
self.color_mode = config.color_mode
+ self.mock = config.mock
self.camera = None
self.is_connected = False
@@ -223,43 +282,60 @@ def __init__(self, camera_index: int, config: OpenCVCameraConfig | None = None,
self.color_image = None
self.logs = {}
+ if self.mock:
+ import tests.mock_cv2 as cv2
+ else:
+ import cv2
+
+ # TODO(aliberts): Do we keep original width/height or do we define them after rotation?
+ self.rotation = None
+ if config.rotation == -90:
+ self.rotation = cv2.ROTATE_90_COUNTERCLOCKWISE
+ elif config.rotation == 90:
+ self.rotation = cv2.ROTATE_90_CLOCKWISE
+ elif config.rotation == 180:
+ self.rotation = cv2.ROTATE_180
+
def connect(self):
if self.is_connected:
raise RobotDeviceAlreadyConnectedError(f"OpenCVCamera({self.camera_index}) is already connected.")
- # First create a temporary camera trying to access `camera_index`,
- # and verify it is a valid camera by calling `isOpened`.
-
- if platform.system() == "Linux":
- # Linux uses ports for connecting to cameras
- tmp_camera = cv2.VideoCapture(f"/dev/video{self.camera_index}")
+ if self.mock:
+ import tests.mock_cv2 as cv2
else:
- tmp_camera = cv2.VideoCapture(self.camera_index)
+ import cv2
+ # Use 1 thread to avoid blocking the main thread. Especially useful during data collection
+ # when other threads are used to save the images.
+ cv2.setNumThreads(1)
+
+ camera_idx = f"/dev/video{self.camera_index}" if platform.system() == "Linux" else self.camera_index
+ # First create a temporary camera trying to access `camera_index`,
+ # and verify it is a valid camera by calling `isOpened`.
+ tmp_camera = cv2.VideoCapture(camera_idx)
is_camera_open = tmp_camera.isOpened()
# Release camera to make it accessible for `find_camera_indices`
+ tmp_camera.release()
del tmp_camera
# If the camera doesn't work, display the camera indices corresponding to
# valid cameras.
if not is_camera_open:
# Verify that the provided `camera_index` is valid before printing the traceback
- available_cam_ids = find_camera_indices()
+ cameras_info = find_cameras()
+ available_cam_ids = [cam["index"] for cam in cameras_info]
if self.camera_index not in available_cam_ids:
raise ValueError(
f"`camera_index` is expected to be one of these available cameras {available_cam_ids}, but {self.camera_index} is provided instead. "
"To find the camera index you should use, run `python lerobot/common/robot_devices/cameras/opencv.py`."
)
- raise OSError(f"Can't access OpenCVCamera({self.camera_index}).")
+ raise OSError(f"Can't access OpenCVCamera({camera_idx}).")
# Secondly, create the camera that will be used downstream.
# Note: For some unknown reason, calling `isOpened` blocks the camera which then
# needs to be re-created.
- if platform.system() == "Linux":
- self.camera = cv2.VideoCapture(f"/dev/video{self.camera_index}")
- else:
- self.camera = cv2.VideoCapture(self.camera_index)
+ self.camera = cv2.VideoCapture(camera_idx)
if self.fps is not None:
self.camera.set(cv2.CAP_PROP_FPS, self.fps)
@@ -272,22 +348,24 @@ def connect(self):
actual_width = self.camera.get(cv2.CAP_PROP_FRAME_WIDTH)
actual_height = self.camera.get(cv2.CAP_PROP_FRAME_HEIGHT)
+ # Using `math.isclose` since actual fps can be a float (e.g. 29.9 instead of 30)
if self.fps is not None and not math.isclose(self.fps, actual_fps, rel_tol=1e-3):
+ # Using `OSError` since it's a broad that encompasses issues related to device communication
raise OSError(
f"Can't set {self.fps=} for OpenCVCamera({self.camera_index}). Actual value is {actual_fps}."
)
- if self.width is not None and self.width != actual_width:
+ if self.width is not None and not math.isclose(self.width, actual_width, rel_tol=1e-3):
raise OSError(
f"Can't set {self.width=} for OpenCVCamera({self.camera_index}). Actual value is {actual_width}."
)
- if self.height is not None and self.height != actual_height:
+ if self.height is not None and not math.isclose(self.height, actual_height, rel_tol=1e-3):
raise OSError(
f"Can't set {self.height=} for OpenCVCamera({self.camera_index}). Actual value is {actual_height}."
)
- self.fps = actual_fps
- self.width = actual_width
- self.height = actual_height
+ self.fps = round(actual_fps)
+ self.width = round(actual_width)
+ self.height = round(actual_height)
self.is_connected = True
@@ -306,6 +384,7 @@ def read(self, temporary_color_mode: str | None = None) -> np.ndarray:
start_time = time.perf_counter()
ret, color_image = self.camera.read()
+
if not ret:
raise OSError(f"Can't capture color image from camera {self.camera_index}.")
@@ -320,6 +399,11 @@ def read(self, temporary_color_mode: str | None = None) -> np.ndarray:
# However, Deep Learning framework such as LeRobot uses RGB format as default to train neural networks,
# so we convert the image color from BGR to RGB.
if requested_color_mode == "rgb":
+ if self.mock:
+ import tests.mock_cv2 as cv2
+ else:
+ import cv2
+
color_image = cv2.cvtColor(color_image, cv2.COLOR_BGR2RGB)
h, w, _ = color_image.shape
@@ -328,17 +412,25 @@ def read(self, temporary_color_mode: str | None = None) -> np.ndarray:
f"Can't capture color image with expected height and width ({self.height} x {self.width}). ({h} x {w}) returned instead."
)
+ if self.rotation is not None:
+ color_image = cv2.rotate(color_image, self.rotation)
+
# log the number of seconds it took to read the image
self.logs["delta_timestamp_s"] = time.perf_counter() - start_time
# log the utc time at which the image was received
self.logs["timestamp_utc"] = capture_timestamp_utc()
+ self.color_image = color_image
+
return color_image
def read_loop(self):
- while self.stop_event is None or not self.stop_event.is_set():
- self.color_image = self.read()
+ while not self.stop_event.is_set():
+ try:
+ self.color_image = self.read()
+ except Exception as e:
+ print(f"Error reading in thread: {e}")
def async_read(self):
if not self.is_connected:
@@ -353,15 +445,14 @@ def async_read(self):
self.thread.start()
num_tries = 0
- while self.color_image is None:
- num_tries += 1
- time.sleep(1 / self.fps)
- if num_tries > self.fps and (self.thread.ident is None or not self.thread.is_alive()):
- raise Exception(
- "The thread responsible for `self.async_read()` took too much time to start. There might be an issue. Verify that `self.thread.start()` has been called."
- )
+ while True:
+ if self.color_image is not None:
+ return self.color_image
- return self.color_image
+ time.sleep(1 / self.fps)
+ num_tries += 1
+ if num_tries > self.fps * 2:
+ raise TimeoutError("Timed out waiting for async_read() to start.")
def disconnect(self):
if not self.is_connected:
@@ -369,16 +460,14 @@ def disconnect(self):
f"OpenCVCamera({self.camera_index}) is not connected. Try running `camera.connect()` first."
)
- if self.thread is not None and self.thread.is_alive():
- # wait for the thread to finish
+ if self.thread is not None:
self.stop_event.set()
- self.thread.join()
+ self.thread.join() # wait for the thread to finish
self.thread = None
self.stop_event = None
self.camera.release()
self.camera = None
-
self.is_connected = False
def __del__(self):
@@ -424,7 +513,7 @@ def __del__(self):
parser.add_argument(
"--record-time-s",
type=float,
- default=2.0,
+ default=4.0,
help="Set the number of seconds used to record the frames. By default, 2 seconds.",
)
args = parser.parse_args()
diff --git a/lerobot/common/robot_devices/cameras/utils.py b/lerobot/common/robot_devices/cameras/utils.py
index 0f329d9fb..7904a57a5 100644
--- a/lerobot/common/robot_devices/cameras/utils.py
+++ b/lerobot/common/robot_devices/cameras/utils.py
@@ -1,55 +1,8 @@
-from pathlib import Path
from typing import Protocol
-import cv2
-import einops
import numpy as np
-def write_shape_on_image_inplace(image):
- height, width = image.shape[:2]
- text = f"Width: {width} Height: {height}"
-
- # Define the font, scale, color, and thickness
- font = cv2.FONT_HERSHEY_SIMPLEX
- font_scale = 1
- color = (255, 0, 0) # Blue in BGR
- thickness = 2
-
- position = (10, height - 10) # 10 pixels from the bottom-left corner
- cv2.putText(image, text, position, font, font_scale, color, thickness)
-
-
-def save_color_image(image, path, write_shape=False):
- path = Path(path)
- path.parent.mkdir(parents=True, exist_ok=True)
- if write_shape:
- write_shape_on_image_inplace(image)
- cv2.imwrite(str(path), image)
-
-
-def save_depth_image(depth, path, write_shape=False):
- path = Path(path)
- path.parent.mkdir(parents=True, exist_ok=True)
-
- # Apply colormap on depth image (image must be converted to 8-bit per pixel first)
- depth_image = cv2.applyColorMap(cv2.convertScaleAbs(depth, alpha=0.03), cv2.COLORMAP_JET)
-
- if write_shape:
- write_shape_on_image_inplace(depth_image)
- cv2.imwrite(str(path), depth_image)
-
-
-def convert_torch_image_to_cv2(tensor, rgb_to_bgr=True):
- assert tensor.ndim == 3
- c, h, w = tensor.shape
- assert c < h and c < w
- color_image = einops.rearrange(tensor, "c h w -> h w c").numpy()
- if rgb_to_bgr:
- color_image = cv2.cvtColor(color_image, cv2.COLOR_RGB2BGR)
- return color_image
-
-
# Defines a camera type
class Camera(Protocol):
def connect(self): ...
diff --git a/lerobot/common/robot_devices/control_utils.py b/lerobot/common/robot_devices/control_utils.py
new file mode 100644
index 000000000..9b9649dde
--- /dev/null
+++ b/lerobot/common/robot_devices/control_utils.py
@@ -0,0 +1,363 @@
+########################################################################################
+# Utilities
+########################################################################################
+
+
+import logging
+import time
+import traceback
+from contextlib import nullcontext
+from copy import copy
+from functools import cache
+
+import cv2
+import torch
+import tqdm
+from deepdiff import DeepDiff
+from termcolor import colored
+
+from lerobot.common.datasets.image_writer import safe_stop_image_writer
+from lerobot.common.datasets.lerobot_dataset import LeRobotDataset
+from lerobot.common.datasets.utils import get_features_from_robot
+from lerobot.common.policies.factory import make_policy
+from lerobot.common.robot_devices.robots.utils import Robot
+from lerobot.common.robot_devices.utils import busy_wait
+from lerobot.common.utils.utils import get_safe_torch_device, init_hydra_config, set_global_seed
+from lerobot.scripts.eval import get_pretrained_policy_path
+
+
+def log_control_info(robot: Robot, dt_s, episode_index=None, frame_index=None, fps=None):
+ log_items = []
+ if episode_index is not None:
+ log_items.append(f"ep:{episode_index}")
+ if frame_index is not None:
+ log_items.append(f"frame:{frame_index}")
+
+ def log_dt(shortname, dt_val_s):
+ nonlocal log_items, fps
+ info_str = f"{shortname}:{dt_val_s * 1000:5.2f} ({1/ dt_val_s:3.1f}hz)"
+ if fps is not None:
+ actual_fps = 1 / dt_val_s
+ if actual_fps < fps - 1:
+ info_str = colored(info_str, "yellow")
+ log_items.append(info_str)
+
+ # total step time displayed in milliseconds and its frequency
+ log_dt("dt", dt_s)
+
+ # TODO(aliberts): move robot-specific logs logic in robot.print_logs()
+ if not robot.robot_type.startswith("stretch"):
+ for name in robot.leader_arms:
+ key = f"read_leader_{name}_pos_dt_s"
+ if key in robot.logs:
+ log_dt("dtRlead", robot.logs[key])
+
+ for name in robot.follower_arms:
+ key = f"write_follower_{name}_goal_pos_dt_s"
+ if key in robot.logs:
+ log_dt("dtWfoll", robot.logs[key])
+
+ key = f"read_follower_{name}_pos_dt_s"
+ if key in robot.logs:
+ log_dt("dtRfoll", robot.logs[key])
+
+ for name in robot.cameras:
+ key = f"read_camera_{name}_dt_s"
+ if key in robot.logs:
+ log_dt(f"dtR{name}", robot.logs[key])
+
+ info_str = " ".join(log_items)
+ logging.info(info_str)
+
+
+@cache
+def is_headless():
+ """Detects if python is running without a monitor."""
+ try:
+ import pynput # noqa
+
+ return False
+ except Exception:
+ print(
+ "Error trying to import pynput. Switching to headless mode. "
+ "As a result, the video stream from the cameras won't be shown, "
+ "and you won't be able to change the control flow with keyboards. "
+ "For more info, see traceback below.\n"
+ )
+ traceback.print_exc()
+ print()
+ return True
+
+
+def has_method(_object: object, method_name: str):
+ return hasattr(_object, method_name) and callable(getattr(_object, method_name))
+
+
+def predict_action(observation, policy, device, use_amp):
+ observation = copy(observation)
+ with (
+ torch.inference_mode(),
+ torch.autocast(device_type=device.type) if device.type == "cuda" and use_amp else nullcontext(),
+ ):
+ # Convert to pytorch format: channel first and float32 in [0,1] with batch dimension
+ for name in observation:
+ if "image" in name:
+ observation[name] = observation[name].type(torch.float32) / 255
+ observation[name] = observation[name].permute(2, 0, 1).contiguous()
+ observation[name] = observation[name].unsqueeze(0)
+ observation[name] = observation[name].to(device)
+
+ # Compute the next action with the policy
+ # based on the current observation
+ action = policy.select_action(observation)
+
+ # Remove batch dimension
+ action = action.squeeze(0)
+
+ # Move to cpu, if not already the case
+ action = action.to("cpu")
+
+ return action
+
+
+def init_keyboard_listener():
+ # Allow to exit early while recording an episode or resetting the environment,
+ # by tapping the right arrow key '->'. This might require a sudo permission
+ # to allow your terminal to monitor keyboard events.
+ events = {}
+ events["exit_early"] = False
+ events["rerecord_episode"] = False
+ events["stop_recording"] = False
+
+ if is_headless():
+ logging.warning(
+ "Headless environment detected. On-screen cameras display and keyboard inputs will not be available."
+ )
+ listener = None
+ return listener, events
+
+ # Only import pynput if not in a headless environment
+ from pynput import keyboard
+
+ def on_press(key):
+ try:
+ if key == keyboard.Key.right:
+ print("Right arrow key pressed. Exiting loop...")
+ events["exit_early"] = True
+ elif key == keyboard.Key.left:
+ print("Left arrow key pressed. Exiting loop and rerecord the last episode...")
+ events["rerecord_episode"] = True
+ events["exit_early"] = True
+ elif key == keyboard.Key.esc:
+ print("Escape key pressed. Stopping data recording...")
+ events["stop_recording"] = True
+ events["exit_early"] = True
+ except Exception as e:
+ print(f"Error handling key press: {e}")
+
+ listener = keyboard.Listener(on_press=on_press)
+ listener.start()
+
+ return listener, events
+
+
+def init_policy(pretrained_policy_name_or_path, policy_overrides):
+ """Instantiate the policy and load fps, device and use_amp from config yaml"""
+ pretrained_policy_path = get_pretrained_policy_path(pretrained_policy_name_or_path)
+ hydra_cfg = init_hydra_config(pretrained_policy_path / "config.yaml", policy_overrides)
+ policy = make_policy(hydra_cfg=hydra_cfg, pretrained_policy_name_or_path=pretrained_policy_path)
+
+ # Check device is available
+ device = get_safe_torch_device(hydra_cfg.device, log=True)
+ use_amp = hydra_cfg.use_amp
+ policy_fps = hydra_cfg.env.fps
+
+ policy.eval()
+ policy.to(device)
+
+ torch.backends.cudnn.benchmark = True
+ torch.backends.cuda.matmul.allow_tf32 = True
+ set_global_seed(hydra_cfg.seed)
+ return policy, policy_fps, device, use_amp
+
+
+def warmup_record(
+ robot,
+ events,
+ enable_teloperation,
+ warmup_time_s,
+ display_cameras,
+ fps,
+):
+ control_loop(
+ robot=robot,
+ control_time_s=warmup_time_s,
+ display_cameras=display_cameras,
+ events=events,
+ fps=fps,
+ teleoperate=enable_teloperation,
+ )
+
+
+def record_episode(
+ robot,
+ dataset,
+ events,
+ episode_time_s,
+ display_cameras,
+ policy,
+ device,
+ use_amp,
+ fps,
+):
+ control_loop(
+ robot=robot,
+ control_time_s=episode_time_s,
+ display_cameras=display_cameras,
+ dataset=dataset,
+ events=events,
+ policy=policy,
+ device=device,
+ use_amp=use_amp,
+ fps=fps,
+ teleoperate=policy is None,
+ )
+
+
+@safe_stop_image_writer
+def control_loop(
+ robot,
+ control_time_s=None,
+ teleoperate=False,
+ display_cameras=False,
+ dataset: LeRobotDataset | None = None,
+ events=None,
+ policy=None,
+ device=None,
+ use_amp=None,
+ fps=None,
+):
+ # TODO(rcadene): Add option to record logs
+ if not robot.is_connected:
+ robot.connect()
+
+ if events is None:
+ events = {"exit_early": False}
+
+ if control_time_s is None:
+ control_time_s = float("inf")
+
+ if teleoperate and policy is not None:
+ raise ValueError("When `teleoperate` is True, `policy` should be None.")
+
+ if dataset is not None and fps is not None and dataset.fps != fps:
+ raise ValueError(f"The dataset fps should be equal to requested fps ({dataset['fps']} != {fps}).")
+
+ timestamp = 0
+ start_episode_t = time.perf_counter()
+ while timestamp < control_time_s:
+ start_loop_t = time.perf_counter()
+
+ if teleoperate:
+ observation, action = robot.teleop_step(record_data=True)
+ else:
+ observation = robot.capture_observation()
+
+ if policy is not None:
+ pred_action = predict_action(observation, policy, device, use_amp)
+ # Action can eventually be clipped using `max_relative_target`,
+ # so action actually sent is saved in the dataset.
+ action = robot.send_action(pred_action)
+ action = {"action": action}
+
+ if dataset is not None:
+ frame = {**observation, **action}
+ dataset.add_frame(frame)
+
+ if display_cameras and not is_headless():
+ image_keys = [key for key in observation if "image" in key]
+ for key in image_keys:
+ cv2.imshow(key, cv2.cvtColor(observation[key].numpy(), cv2.COLOR_RGB2BGR))
+ cv2.waitKey(1)
+
+ if fps is not None:
+ dt_s = time.perf_counter() - start_loop_t
+ busy_wait(1 / fps - dt_s)
+
+ dt_s = time.perf_counter() - start_loop_t
+ log_control_info(robot, dt_s, fps=fps)
+
+ timestamp = time.perf_counter() - start_episode_t
+ if events["exit_early"]:
+ events["exit_early"] = False
+ break
+
+
+def reset_environment(robot, events, reset_time_s):
+ # TODO(rcadene): refactor warmup_record and reset_environment
+ # TODO(alibets): allow for teleop during reset
+ if has_method(robot, "teleop_safety_stop"):
+ robot.teleop_safety_stop()
+
+ timestamp = 0
+ start_vencod_t = time.perf_counter()
+
+ # Wait if necessary
+ with tqdm.tqdm(total=reset_time_s, desc="Waiting") as pbar:
+ while timestamp < reset_time_s:
+ time.sleep(1)
+ timestamp = time.perf_counter() - start_vencod_t
+ pbar.update(1)
+ if events["exit_early"]:
+ events["exit_early"] = False
+ break
+
+
+def stop_recording(robot, listener, display_cameras):
+ robot.disconnect()
+
+ if not is_headless():
+ if listener is not None:
+ listener.stop()
+
+ if display_cameras:
+ cv2.destroyAllWindows()
+
+
+def sanity_check_dataset_name(repo_id, policy):
+ _, dataset_name = repo_id.split("/")
+ # either repo_id doesnt start with "eval_" and there is no policy
+ # or repo_id starts with "eval_" and there is a policy
+
+ # Check if dataset_name starts with "eval_" but policy is missing
+ if dataset_name.startswith("eval_") and policy is None:
+ raise ValueError(
+ f"Your dataset name begins with 'eval_' ({dataset_name}), but no policy is provided."
+ )
+
+ # Check if dataset_name does not start with "eval_" but policy is provided
+ if not dataset_name.startswith("eval_") and policy is not None:
+ raise ValueError(
+ f"Your dataset name does not begin with 'eval_' ({dataset_name}), but a policy is provided ({policy})."
+ )
+
+
+def sanity_check_dataset_robot_compatibility(
+ dataset: LeRobotDataset, robot: Robot, fps: int, use_videos: bool
+) -> None:
+ fields = [
+ ("robot_type", dataset.meta.robot_type, robot.robot_type),
+ ("fps", dataset.fps, fps),
+ ("features", dataset.features, get_features_from_robot(robot, use_videos)),
+ ]
+
+ mismatches = []
+ for field, dataset_value, present_value in fields:
+ diff = DeepDiff(dataset_value, present_value, exclude_regex_paths=[r".*\['info'\]$"])
+ if diff:
+ mismatches.append(f"{field}: expected {present_value}, got {dataset_value}")
+
+ if mismatches:
+ raise ValueError(
+ "Dataset metadata compatibility check failed with mismatches:\n" + "\n".join(mismatches)
+ )
diff --git a/lerobot/common/robot_devices/motors/dynamixel.py b/lerobot/common/robot_devices/motors/dynamixel.py
index 491963fed..1e1396f76 100644
--- a/lerobot/common/robot_devices/motors/dynamixel.py
+++ b/lerobot/common/robot_devices/motors/dynamixel.py
@@ -4,21 +4,9 @@
import time
import traceback
from copy import deepcopy
-from pathlib import Path
import numpy as np
import tqdm
-from dynamixel_sdk import (
- COMM_SUCCESS,
- DXL_HIBYTE,
- DXL_HIWORD,
- DXL_LOBYTE,
- DXL_LOWORD,
- GroupSyncRead,
- GroupSyncWrite,
- PacketHandler,
- PortHandler,
-)
from lerobot.common.robot_devices.utils import RobotDeviceAlreadyConnectedError, RobotDeviceNotConnectedError
from lerobot.common.utils.utils import capture_timestamp_utc
@@ -166,24 +154,29 @@ def convert_degrees_to_steps(degrees: float | np.ndarray, models: str | list[str
return steps
-def convert_to_bytes(value, bytes):
+def convert_to_bytes(value, bytes, mock=False):
+ if mock:
+ return value
+
+ import dynamixel_sdk as dxl
+
# Note: No need to convert back into unsigned int, since this byte preprocessing
# already handles it for us.
if bytes == 1:
data = [
- DXL_LOBYTE(DXL_LOWORD(value)),
+ dxl.DXL_LOBYTE(dxl.DXL_LOWORD(value)),
]
elif bytes == 2:
data = [
- DXL_LOBYTE(DXL_LOWORD(value)),
- DXL_HIBYTE(DXL_LOWORD(value)),
+ dxl.DXL_LOBYTE(dxl.DXL_LOWORD(value)),
+ dxl.DXL_HIBYTE(dxl.DXL_LOWORD(value)),
]
elif bytes == 4:
data = [
- DXL_LOBYTE(DXL_LOWORD(value)),
- DXL_HIBYTE(DXL_LOWORD(value)),
- DXL_LOBYTE(DXL_HIWORD(value)),
- DXL_HIBYTE(DXL_HIWORD(value)),
+ dxl.DXL_LOBYTE(dxl.DXL_LOWORD(value)),
+ dxl.DXL_HIBYTE(dxl.DXL_LOWORD(value)),
+ dxl.DXL_LOBYTE(dxl.DXL_HIWORD(value)),
+ dxl.DXL_HIBYTE(dxl.DXL_HIWORD(value)),
]
else:
raise NotImplementedError(
@@ -235,35 +228,6 @@ def assert_same_address(model_ctrl_table, motor_models, data_name):
)
-def find_available_ports():
- ports = []
- for path in Path("/dev").glob("tty*"):
- ports.append(str(path))
- return ports
-
-
-def find_port():
- print("Finding all available ports for the DynamixelMotorsBus.")
- ports_before = find_available_ports()
- print(ports_before)
-
- print("Remove the usb cable from your DynamixelMotorsBus and press Enter when done.")
- input()
-
- time.sleep(0.5)
- ports_after = find_available_ports()
- ports_diff = list(set(ports_before) - set(ports_after))
-
- if len(ports_diff) == 1:
- port = ports_diff[0]
- print(f"The port of this DynamixelMotorsBus is '{port}'")
- print("Reconnect the usb cable.")
- elif len(ports_diff) == 0:
- raise OSError(f"Could not detect the port. No difference was found ({ports_diff}).")
- else:
- raise OSError(f"Could not detect the port. More than one port was found ({ports_diff}).")
-
-
class TorqueMode(enum.Enum):
ENABLED = 1
DISABLED = 0
@@ -296,8 +260,8 @@ class DynamixelMotorsBus:
A DynamixelMotorsBus instance requires a port (e.g. `DynamixelMotorsBus(port="/dev/tty.usbmodem575E0031751"`)).
To find the port, you can run our utility script:
```bash
- python lerobot/common/robot_devices/motors/dynamixel.py
- >>> Finding all available ports for the DynamixelMotorsBus.
+ python lerobot/scripts/find_motors_bus_port.py
+ >>> Finding all available ports for the MotorBus.
>>> ['/dev/tty.usbmodem575E0032081', '/dev/tty.usbmodem575E0031751']
>>> Remove the usb cable from your DynamixelMotorsBus and press Enter when done.
>>> The port of this DynamixelMotorsBus is /dev/tty.usbmodem575E0031751.
@@ -333,9 +297,11 @@ def __init__(
motors: dict[str, tuple[int, str]],
extra_model_control_table: dict[str, list[tuple]] | None = None,
extra_model_resolution: dict[str, int] | None = None,
+ mock=False,
):
self.port = port
self.motors = motors
+ self.mock = mock
self.model_ctrl_table = deepcopy(MODEL_CONTROL_TABLE)
if extra_model_control_table:
@@ -359,8 +325,13 @@ def connect(self):
f"DynamixelMotorsBus({self.port}) is already connected. Do not call `motors_bus.connect()` twice."
)
- self.port_handler = PortHandler(self.port)
- self.packet_handler = PacketHandler(PROTOCOL_VERSION)
+ if self.mock:
+ import tests.mock_dynamixel_sdk as dxl
+ else:
+ import dynamixel_sdk as dxl
+
+ self.port_handler = dxl.PortHandler(self.port)
+ self.packet_handler = dxl.PacketHandler(PROTOCOL_VERSION)
try:
if not self.port_handler.openPort():
@@ -368,7 +339,7 @@ def connect(self):
except Exception:
traceback.print_exc()
print(
- "\nTry running `python lerobot/common/robot_devices/motors/dynamixel.py` to make sure you are using the correct port.\n"
+ "\nTry running `python lerobot/scripts/find_motors_bus_port.py` to make sure you are using the correct port.\n"
)
raise
@@ -377,25 +348,18 @@ def connect(self):
self.port_handler.setPacketTimeoutMillis(TIMEOUT_MS)
- # Set expected baudrate for the bus
- self.set_bus_baudrate(BAUDRATE)
+ def reconnect(self):
+ if self.mock:
+ import tests.mock_dynamixel_sdk as dxl
+ else:
+ import dynamixel_sdk as dxl
- if not self.are_motors_configured():
- input(
- "\n/!\\ A configuration issue has been detected with your motors: \n"
- "If it's the first time that you use these motors, press enter to configure your motors... but before "
- "verify that all the cables are connected the proper way. If you find an issue, before making a modification, "
- "kill the python process, unplug the power cord to not damage the motors, rewire correctly, then plug the power "
- "again and relaunch the script.\n"
- )
- print()
- self.configure_motors()
+ self.port_handler = dxl.PortHandler(self.port)
+ self.packet_handler = dxl.PacketHandler(PROTOCOL_VERSION)
- def reconnect(self):
- self.port_handler = PortHandler(self.port)
- self.packet_handler = PacketHandler(PROTOCOL_VERSION)
if not self.port_handler.openPort():
raise OSError(f"Failed to open port '{self.port}'.")
+
self.is_connected = True
def are_motors_configured(self):
@@ -407,120 +371,14 @@ def are_motors_configured(self):
print(e)
return False
- def configure_motors(self):
- # TODO(rcadene): This script assumes motors follow the X_SERIES baudrates
- # TODO(rcadene): Refactor this function with intermediate high-level functions
-
- print("Scanning all baudrates and motor indices")
- all_baudrates = set(X_SERIES_BAUDRATE_TABLE.values())
- ids_per_baudrate = {}
- for baudrate in all_baudrates:
- self.set_bus_baudrate(baudrate)
- present_ids = self.find_motor_indices()
- if len(present_ids) > 0:
- ids_per_baudrate[baudrate] = present_ids
- print(f"Motor indices detected: {ids_per_baudrate}")
- print()
-
- possible_baudrates = list(ids_per_baudrate.keys())
- possible_ids = list({idx for sublist in ids_per_baudrate.values() for idx in sublist})
- untaken_ids = list(set(range(MAX_ID_RANGE)) - set(possible_ids) - set(self.motor_indices))
-
- # Connect successively one motor to the chain and write a unique random index for each
- for i in range(len(self.motors)):
- self.disconnect()
- input(
- "1. Unplug the power cord\n"
- "2. Plug/unplug minimal number of cables to only have the first "
- f"{i+1} motor(s) ({self.motor_names[:i+1]}) connected.\n"
- "3. Re-plug the power cord\n"
- "Press Enter to continue..."
- )
- print()
- self.reconnect()
-
- if i > 0:
- try:
- self._read_with_motor_ids(self.motor_models, untaken_ids[:i], "ID")
- except ConnectionError:
- print(f"Failed to read from {untaken_ids[:i+1]}. Make sure the power cord is plugged in.")
- input("Press Enter to continue...")
- print()
- self.reconnect()
-
- print("Scanning possible baudrates and motor indices")
- motor_found = False
- for baudrate in possible_baudrates:
- self.set_bus_baudrate(baudrate)
- present_ids = self.find_motor_indices(possible_ids)
- if len(present_ids) == 1:
- present_idx = present_ids[0]
- print(f"Detected motor with index {present_idx}")
-
- if baudrate != BAUDRATE:
- print(f"Setting its baudrate to {BAUDRATE}")
- baudrate_idx = list(X_SERIES_BAUDRATE_TABLE.values()).index(BAUDRATE)
-
- # The write can fail, so we allow retries
- for _ in range(NUM_WRITE_RETRY):
- self._write_with_motor_ids(
- self.motor_models, present_idx, "Baud_Rate", baudrate_idx
- )
- time.sleep(0.5)
- self.set_bus_baudrate(BAUDRATE)
- try:
- present_baudrate_idx = self._read_with_motor_ids(
- self.motor_models, present_idx, "Baud_Rate"
- )
- except ConnectionError:
- print("Failed to write baudrate. Retrying.")
- self.set_bus_baudrate(baudrate)
- continue
- break
- else:
- raise
-
- if present_baudrate_idx != baudrate_idx:
- raise OSError("Failed to write baudrate.")
-
- print(f"Setting its index to a temporary untaken index ({untaken_ids[i]})")
- self._write_with_motor_ids(self.motor_models, present_idx, "ID", untaken_ids[i])
-
- present_idx = self._read_with_motor_ids(self.motor_models, untaken_ids[i], "ID")
- if present_idx != untaken_ids[i]:
- raise OSError("Failed to write index.")
-
- motor_found = True
- break
- elif len(present_ids) > 1:
- raise OSError(f"More than one motor detected ({present_ids}), but only one was expected.")
-
- if not motor_found:
- raise OSError(
- "No motor found, but one new motor expected. Verify power cord is plugged in and retry."
- )
- print()
-
- print(f"Setting expected motor indices: {self.motor_indices}")
- self.set_bus_baudrate(BAUDRATE)
- self._write_with_motor_ids(
- self.motor_models, untaken_ids[: len(self.motors)], "ID", self.motor_indices
- )
- print()
-
- if (self.read("ID") != self.motor_indices).any():
- raise OSError("Failed to write motors indices.")
-
- print("Configuration is done!")
-
- def find_motor_indices(self, possible_ids=None):
+ def find_motor_indices(self, possible_ids=None, num_retry=2):
if possible_ids is None:
possible_ids = range(MAX_ID_RANGE)
indices = []
for idx in tqdm.tqdm(possible_ids):
try:
- present_idx = self._read_with_motor_ids(self.motor_models, [idx], "ID")[0]
+ present_idx = self.read_with_motor_ids(self.motor_models, [idx], "ID", num_retry=num_retry)[0]
except ConnectionError:
continue
@@ -780,7 +638,12 @@ def revert_calibration(self, values: np.ndarray | list, motor_names: list[str] |
values = np.round(values).astype(np.int32)
return values
- def _read_with_motor_ids(self, motor_models, motor_ids, data_name):
+ def read_with_motor_ids(self, motor_models, motor_ids, data_name, num_retry=NUM_READ_RETRY):
+ if self.mock:
+ import tests.mock_dynamixel_sdk as dxl
+ else:
+ import dynamixel_sdk as dxl
+
return_list = True
if not isinstance(motor_ids, list):
return_list = False
@@ -788,12 +651,16 @@ def _read_with_motor_ids(self, motor_models, motor_ids, data_name):
assert_same_address(self.model_ctrl_table, self.motor_models, data_name)
addr, bytes = self.model_ctrl_table[motor_models[0]][data_name]
- group = GroupSyncRead(self.port_handler, self.packet_handler, addr, bytes)
+ group = dxl.GroupSyncRead(self.port_handler, self.packet_handler, addr, bytes)
for idx in motor_ids:
group.addParam(idx)
- comm = group.txRxPacket()
- if comm != COMM_SUCCESS:
+ for _ in range(num_retry):
+ comm = group.txRxPacket()
+ if comm == dxl.COMM_SUCCESS:
+ break
+
+ if comm != dxl.COMM_SUCCESS:
raise ConnectionError(
f"Read failed due to communication error on port {self.port_handler.port_name} for indices {motor_ids}: "
f"{self.packet_handler.getTxRxResult(comm)}"
@@ -817,6 +684,11 @@ def read(self, data_name, motor_names: str | list[str] | None = None):
start_time = time.perf_counter()
+ if self.mock:
+ import tests.mock_dynamixel_sdk as dxl
+ else:
+ import dynamixel_sdk as dxl
+
if motor_names is None:
motor_names = self.motor_names
@@ -836,16 +708,18 @@ def read(self, data_name, motor_names: str | list[str] | None = None):
if data_name not in self.group_readers:
# create new group reader
- self.group_readers[group_key] = GroupSyncRead(self.port_handler, self.packet_handler, addr, bytes)
+ self.group_readers[group_key] = dxl.GroupSyncRead(
+ self.port_handler, self.packet_handler, addr, bytes
+ )
for idx in motor_ids:
self.group_readers[group_key].addParam(idx)
for _ in range(NUM_READ_RETRY):
comm = self.group_readers[group_key].txRxPacket()
- if comm == COMM_SUCCESS:
+ if comm == dxl.COMM_SUCCESS:
break
- if comm != COMM_SUCCESS:
+ if comm != dxl.COMM_SUCCESS:
raise ConnectionError(
f"Read failed due to communication error on port {self.port} for group_key {group_key}: "
f"{self.packet_handler.getTxRxResult(comm)}"
@@ -875,7 +749,12 @@ def read(self, data_name, motor_names: str | list[str] | None = None):
return values
- def _write_with_motor_ids(self, motor_models, motor_ids, data_name, values):
+ def write_with_motor_ids(self, motor_models, motor_ids, data_name, values, num_retry=NUM_WRITE_RETRY):
+ if self.mock:
+ import tests.mock_dynamixel_sdk as dxl
+ else:
+ import dynamixel_sdk as dxl
+
if not isinstance(motor_ids, list):
motor_ids = [motor_ids]
if not isinstance(values, list):
@@ -883,13 +762,17 @@ def _write_with_motor_ids(self, motor_models, motor_ids, data_name, values):
assert_same_address(self.model_ctrl_table, motor_models, data_name)
addr, bytes = self.model_ctrl_table[motor_models[0]][data_name]
- group = GroupSyncWrite(self.port_handler, self.packet_handler, addr, bytes)
+ group = dxl.GroupSyncWrite(self.port_handler, self.packet_handler, addr, bytes)
for idx, value in zip(motor_ids, values, strict=True):
- data = convert_to_bytes(value, bytes)
+ data = convert_to_bytes(value, bytes, self.mock)
group.addParam(idx, data)
- comm = group.txPacket()
- if comm != COMM_SUCCESS:
+ for _ in range(num_retry):
+ comm = group.txPacket()
+ if comm == dxl.COMM_SUCCESS:
+ break
+
+ if comm != dxl.COMM_SUCCESS:
raise ConnectionError(
f"Write failed due to communication error on port {self.port_handler.port_name} for indices {motor_ids}: "
f"{self.packet_handler.getTxRxResult(comm)}"
@@ -903,6 +786,11 @@ def write(self, data_name, values: int | float | np.ndarray, motor_names: str |
start_time = time.perf_counter()
+ if self.mock:
+ import tests.mock_dynamixel_sdk as dxl
+ else:
+ import dynamixel_sdk as dxl
+
if motor_names is None:
motor_names = self.motor_names
@@ -932,19 +820,19 @@ def write(self, data_name, values: int | float | np.ndarray, motor_names: str |
init_group = data_name not in self.group_readers
if init_group:
- self.group_writers[group_key] = GroupSyncWrite(
+ self.group_writers[group_key] = dxl.GroupSyncWrite(
self.port_handler, self.packet_handler, addr, bytes
)
for idx, value in zip(motor_ids, values, strict=True):
- data = convert_to_bytes(value, bytes)
+ data = convert_to_bytes(value, bytes, self.mock)
if init_group:
self.group_writers[group_key].addParam(idx, data)
else:
self.group_writers[group_key].changeParam(idx, data)
comm = self.group_writers[group_key].txPacket()
- if comm != COMM_SUCCESS:
+ if comm != dxl.COMM_SUCCESS:
raise ConnectionError(
f"Write failed due to communication error on port {self.port} for group_key {group_key}: "
f"{self.packet_handler.getTxRxResult(comm)}"
@@ -977,8 +865,3 @@ def disconnect(self):
def __del__(self):
if getattr(self, "is_connected", False):
self.disconnect()
-
-
-if __name__ == "__main__":
- # Helper to find the usb port associated to all your DynamixelMotorsBus.
- find_port()
diff --git a/lerobot/common/robot_devices/motors/feetech.py b/lerobot/common/robot_devices/motors/feetech.py
new file mode 100644
index 000000000..0d5480f7a
--- /dev/null
+++ b/lerobot/common/robot_devices/motors/feetech.py
@@ -0,0 +1,887 @@
+import enum
+import logging
+import math
+import time
+import traceback
+from copy import deepcopy
+
+import numpy as np
+import tqdm
+
+from lerobot.common.robot_devices.utils import RobotDeviceAlreadyConnectedError, RobotDeviceNotConnectedError
+from lerobot.common.utils.utils import capture_timestamp_utc
+
+PROTOCOL_VERSION = 0
+BAUDRATE = 1_000_000
+TIMEOUT_MS = 1000
+
+MAX_ID_RANGE = 252
+
+# The following bounds define the lower and upper joints range (after calibration).
+# For joints in degree (i.e. revolute joints), their nominal range is [-180, 180] degrees
+# which corresponds to a half rotation on the left and half rotation on the right.
+# Some joints might require higher range, so we allow up to [-270, 270] degrees until
+# an error is raised.
+LOWER_BOUND_DEGREE = -270
+UPPER_BOUND_DEGREE = 270
+# For joints in percentage (i.e. joints that move linearly like the prismatic joint of a gripper),
+# their nominal range is [0, 100] %. For instance, for Aloha gripper, 0% is fully
+# closed, and 100% is fully open. To account for slight calibration issue, we allow up to
+# [-10, 110] until an error is raised.
+LOWER_BOUND_LINEAR = -10
+UPPER_BOUND_LINEAR = 110
+
+HALF_TURN_DEGREE = 180
+
+
+# See this link for STS3215 Memory Table:
+# https://docs.google.com/spreadsheets/d/1GVs7W1VS1PqdhA1nW-abeyAHhTUxKUdR/edit?usp=sharing&ouid=116566590112741600240&rtpof=true&sd=true
+# data_name: (address, size_byte)
+SCS_SERIES_CONTROL_TABLE = {
+ "Model": (3, 2),
+ "ID": (5, 1),
+ "Baud_Rate": (6, 1),
+ "Return_Delay": (7, 1),
+ "Response_Status_Level": (8, 1),
+ "Min_Angle_Limit": (9, 2),
+ "Max_Angle_Limit": (11, 2),
+ "Max_Temperature_Limit": (13, 1),
+ "Max_Voltage_Limit": (14, 1),
+ "Min_Voltage_Limit": (15, 1),
+ "Max_Torque_Limit": (16, 2),
+ "Phase": (18, 1),
+ "Unloading_Condition": (19, 1),
+ "LED_Alarm_Condition": (20, 1),
+ "P_Coefficient": (21, 1),
+ "D_Coefficient": (22, 1),
+ "I_Coefficient": (23, 1),
+ "Minimum_Startup_Force": (24, 2),
+ "CW_Dead_Zone": (26, 1),
+ "CCW_Dead_Zone": (27, 1),
+ "Protection_Current": (28, 2),
+ "Angular_Resolution": (30, 1),
+ "Offset": (31, 2),
+ "Mode": (33, 1),
+ "Protective_Torque": (34, 1),
+ "Protection_Time": (35, 1),
+ "Overload_Torque": (36, 1),
+ "Speed_closed_loop_P_proportional_coefficient": (37, 1),
+ "Over_Current_Protection_Time": (38, 1),
+ "Velocity_closed_loop_I_integral_coefficient": (39, 1),
+ "Torque_Enable": (40, 1),
+ "Acceleration": (41, 1),
+ "Goal_Position": (42, 2),
+ "Goal_Time": (44, 2),
+ "Goal_Speed": (46, 2),
+ "Torque_Limit": (48, 2),
+ "Lock": (55, 1),
+ "Present_Position": (56, 2),
+ "Present_Speed": (58, 2),
+ "Present_Load": (60, 2),
+ "Present_Voltage": (62, 1),
+ "Present_Temperature": (63, 1),
+ "Status": (65, 1),
+ "Moving": (66, 1),
+ "Present_Current": (69, 2),
+ # Not in the Memory Table
+ "Maximum_Acceleration": (85, 2),
+}
+
+SCS_SERIES_BAUDRATE_TABLE = {
+ 0: 1_000_000,
+ 1: 500_000,
+ 2: 250_000,
+ 3: 128_000,
+ 4: 115_200,
+ 5: 57_600,
+ 6: 38_400,
+ 7: 19_200,
+}
+
+CALIBRATION_REQUIRED = ["Goal_Position", "Present_Position"]
+CONVERT_UINT32_TO_INT32_REQUIRED = ["Goal_Position", "Present_Position"]
+
+
+MODEL_CONTROL_TABLE = {
+ "scs_series": SCS_SERIES_CONTROL_TABLE,
+ "sts3215": SCS_SERIES_CONTROL_TABLE,
+}
+
+MODEL_RESOLUTION = {
+ "scs_series": 4096,
+ "sts3215": 4096,
+}
+
+MODEL_BAUDRATE_TABLE = {
+ "scs_series": SCS_SERIES_BAUDRATE_TABLE,
+ "sts3215": SCS_SERIES_BAUDRATE_TABLE,
+}
+
+# High number of retries is needed for feetech compared to dynamixel motors.
+NUM_READ_RETRY = 20
+NUM_WRITE_RETRY = 20
+
+
+def convert_degrees_to_steps(degrees: float | np.ndarray, models: str | list[str]) -> np.ndarray:
+ """This function converts the degree range to the step range for indicating motors rotation.
+ It assumes a motor achieves a full rotation by going from -180 degree position to +180.
+ The motor resolution (e.g. 4096) corresponds to the number of steps needed to achieve a full rotation.
+ """
+ resolutions = [MODEL_RESOLUTION[model] for model in models]
+ steps = degrees / 180 * np.array(resolutions) / 2
+ steps = steps.astype(int)
+ return steps
+
+
+def convert_to_bytes(value, bytes, mock=False):
+ if mock:
+ return value
+
+ import scservo_sdk as scs
+
+ # Note: No need to convert back into unsigned int, since this byte preprocessing
+ # already handles it for us.
+ if bytes == 1:
+ data = [
+ scs.SCS_LOBYTE(scs.SCS_LOWORD(value)),
+ ]
+ elif bytes == 2:
+ data = [
+ scs.SCS_LOBYTE(scs.SCS_LOWORD(value)),
+ scs.SCS_HIBYTE(scs.SCS_LOWORD(value)),
+ ]
+ elif bytes == 4:
+ data = [
+ scs.SCS_LOBYTE(scs.SCS_LOWORD(value)),
+ scs.SCS_HIBYTE(scs.SCS_LOWORD(value)),
+ scs.SCS_LOBYTE(scs.SCS_HIWORD(value)),
+ scs.SCS_HIBYTE(scs.SCS_HIWORD(value)),
+ ]
+ else:
+ raise NotImplementedError(
+ f"Value of the number of bytes to be sent is expected to be in [1, 2, 4], but "
+ f"{bytes} is provided instead."
+ )
+ return data
+
+
+def get_group_sync_key(data_name, motor_names):
+ group_key = f"{data_name}_" + "_".join(motor_names)
+ return group_key
+
+
+def get_result_name(fn_name, data_name, motor_names):
+ group_key = get_group_sync_key(data_name, motor_names)
+ rslt_name = f"{fn_name}_{group_key}"
+ return rslt_name
+
+
+def get_queue_name(fn_name, data_name, motor_names):
+ group_key = get_group_sync_key(data_name, motor_names)
+ queue_name = f"{fn_name}_{group_key}"
+ return queue_name
+
+
+def get_log_name(var_name, fn_name, data_name, motor_names):
+ group_key = get_group_sync_key(data_name, motor_names)
+ log_name = f"{var_name}_{fn_name}_{group_key}"
+ return log_name
+
+
+def assert_same_address(model_ctrl_table, motor_models, data_name):
+ all_addr = []
+ all_bytes = []
+ for model in motor_models:
+ addr, bytes = model_ctrl_table[model][data_name]
+ all_addr.append(addr)
+ all_bytes.append(bytes)
+
+ if len(set(all_addr)) != 1:
+ raise NotImplementedError(
+ f"At least two motor models use a different address for `data_name`='{data_name}' ({list(zip(motor_models, all_addr, strict=False))}). Contact a LeRobot maintainer."
+ )
+
+ if len(set(all_bytes)) != 1:
+ raise NotImplementedError(
+ f"At least two motor models use a different bytes representation for `data_name`='{data_name}' ({list(zip(motor_models, all_bytes, strict=False))}). Contact a LeRobot maintainer."
+ )
+
+
+class TorqueMode(enum.Enum):
+ ENABLED = 1
+ DISABLED = 0
+
+
+class DriveMode(enum.Enum):
+ NON_INVERTED = 0
+ INVERTED = 1
+
+
+class CalibrationMode(enum.Enum):
+ # Joints with rotational motions are expressed in degrees in nominal range of [-180, 180]
+ DEGREE = 0
+ # Joints with linear motions (like gripper of Aloha) are experessed in nominal range of [0, 100]
+ LINEAR = 1
+
+
+class JointOutOfRangeError(Exception):
+ def __init__(self, message="Joint is out of range"):
+ self.message = message
+ super().__init__(self.message)
+
+
+class FeetechMotorsBus:
+ """
+ The FeetechMotorsBus class allows to efficiently read and write to the attached motors. It relies on
+ the python feetech sdk to communicate with the motors. For more info, see the [feetech SDK Documentation](https://emanual.robotis.com/docs/en/software/feetech/feetech_sdk/sample_code/python_read_write_protocol_2_0/#python-read-write-protocol-20).
+
+ A FeetechMotorsBus instance requires a port (e.g. `FeetechMotorsBus(port="/dev/tty.usbmodem575E0031751"`)).
+ To find the port, you can run our utility script:
+ ```bash
+ python lerobot/scripts/find_motors_bus_port.py
+ >>> Finding all available ports for the MotorsBus.
+ >>> ['/dev/tty.usbmodem575E0032081', '/dev/tty.usbmodem575E0031751']
+ >>> Remove the usb cable from your FeetechMotorsBus and press Enter when done.
+ >>> The port of this FeetechMotorsBus is /dev/tty.usbmodem575E0031751.
+ >>> Reconnect the usb cable.
+ ```
+
+ Example of usage for 1 motor connected to the bus:
+ ```python
+ motor_name = "gripper"
+ motor_index = 6
+ motor_model = "sts3215"
+
+ motors_bus = FeetechMotorsBus(
+ port="/dev/tty.usbmodem575E0031751",
+ motors={motor_name: (motor_index, motor_model)},
+ )
+ motors_bus.connect()
+
+ position = motors_bus.read("Present_Position")
+
+ # move from a few motor steps as an example
+ few_steps = 30
+ motors_bus.write("Goal_Position", position + few_steps)
+
+ # when done, consider disconnecting
+ motors_bus.disconnect()
+ ```
+ """
+
+ def __init__(
+ self,
+ port: str,
+ motors: dict[str, tuple[int, str]],
+ extra_model_control_table: dict[str, list[tuple]] | None = None,
+ extra_model_resolution: dict[str, int] | None = None,
+ mock=False,
+ ):
+ self.port = port
+ self.motors = motors
+ self.mock = mock
+
+ self.model_ctrl_table = deepcopy(MODEL_CONTROL_TABLE)
+ if extra_model_control_table:
+ self.model_ctrl_table.update(extra_model_control_table)
+
+ self.model_resolution = deepcopy(MODEL_RESOLUTION)
+ if extra_model_resolution:
+ self.model_resolution.update(extra_model_resolution)
+
+ self.port_handler = None
+ self.packet_handler = None
+ self.calibration = None
+ self.is_connected = False
+ self.group_readers = {}
+ self.group_writers = {}
+ self.logs = {}
+
+ self.track_positions = {}
+
+ def connect(self):
+ if self.is_connected:
+ raise RobotDeviceAlreadyConnectedError(
+ f"FeetechMotorsBus({self.port}) is already connected. Do not call `motors_bus.connect()` twice."
+ )
+
+ if self.mock:
+ import tests.mock_scservo_sdk as scs
+ else:
+ import scservo_sdk as scs
+
+ self.port_handler = scs.PortHandler(self.port)
+ self.packet_handler = scs.PacketHandler(PROTOCOL_VERSION)
+
+ try:
+ if not self.port_handler.openPort():
+ raise OSError(f"Failed to open port '{self.port}'.")
+ except Exception:
+ traceback.print_exc()
+ print(
+ "\nTry running `python lerobot/scripts/find_motors_bus_port.py` to make sure you are using the correct port.\n"
+ )
+ raise
+
+ # Allow to read and write
+ self.is_connected = True
+
+ self.port_handler.setPacketTimeoutMillis(TIMEOUT_MS)
+
+ def reconnect(self):
+ if self.mock:
+ import tests.mock_scservo_sdk as scs
+ else:
+ import scservo_sdk as scs
+
+ self.port_handler = scs.PortHandler(self.port)
+ self.packet_handler = scs.PacketHandler(PROTOCOL_VERSION)
+
+ if not self.port_handler.openPort():
+ raise OSError(f"Failed to open port '{self.port}'.")
+
+ self.is_connected = True
+
+ def are_motors_configured(self):
+ # Only check the motor indices and not baudrate, since if the motor baudrates are incorrect,
+ # a ConnectionError will be raised anyway.
+ try:
+ return (self.motor_indices == self.read("ID")).all()
+ except ConnectionError as e:
+ print(e)
+ return False
+
+ def find_motor_indices(self, possible_ids=None, num_retry=2):
+ if possible_ids is None:
+ possible_ids = range(MAX_ID_RANGE)
+
+ indices = []
+ for idx in tqdm.tqdm(possible_ids):
+ try:
+ present_idx = self.read_with_motor_ids(self.motor_models, [idx], "ID", num_retry=num_retry)[0]
+ except ConnectionError:
+ continue
+
+ if idx != present_idx:
+ # sanity check
+ raise OSError(
+ "Motor index used to communicate through the bus is not the same as the one present in the motor memory. The motor memory might be damaged."
+ )
+ indices.append(idx)
+
+ return indices
+
+ def set_bus_baudrate(self, baudrate):
+ present_bus_baudrate = self.port_handler.getBaudRate()
+ if present_bus_baudrate != baudrate:
+ print(f"Setting bus baud rate to {baudrate}. Previously {present_bus_baudrate}.")
+ self.port_handler.setBaudRate(baudrate)
+
+ if self.port_handler.getBaudRate() != baudrate:
+ raise OSError("Failed to write bus baud rate.")
+
+ @property
+ def motor_names(self) -> list[str]:
+ return list(self.motors.keys())
+
+ @property
+ def motor_models(self) -> list[str]:
+ return [model for _, model in self.motors.values()]
+
+ @property
+ def motor_indices(self) -> list[int]:
+ return [idx for idx, _ in self.motors.values()]
+
+ def set_calibration(self, calibration: dict[str, list]):
+ self.calibration = calibration
+
+ def apply_calibration_autocorrect(self, values: np.ndarray | list, motor_names: list[str] | None):
+ """This function apply the calibration, automatically detects out of range errors for motors values and attempt to correct.
+
+ For more info, see docstring of `apply_calibration` and `autocorrect_calibration`.
+ """
+ try:
+ values = self.apply_calibration(values, motor_names)
+ except JointOutOfRangeError as e:
+ print(e)
+ self.autocorrect_calibration(values, motor_names)
+ values = self.apply_calibration(values, motor_names)
+ return values
+
+ def apply_calibration(self, values: np.ndarray | list, motor_names: list[str] | None):
+ """Convert from unsigned int32 joint position range [0, 2**32[ to the universal float32 nominal degree range ]-180.0, 180.0[ with
+ a "zero position" at 0 degree.
+
+ Note: We say "nominal degree range" since the motors can take values outside this range. For instance, 190 degrees, if the motor
+ rotate more than a half a turn from the zero position. However, most motors can't rotate more than 180 degrees and will stay in this range.
+
+ Joints values are original in [0, 2**32[ (unsigned int32). Each motor are expected to complete a full rotation
+ when given a goal position that is + or - their resolution. For instance, feetech xl330-m077 have a resolution of 4096, and
+ at any position in their original range, let's say the position 56734, they complete a full rotation clockwise by moving to 60830,
+ or anticlockwise by moving to 52638. The position in the original range is arbitrary and might change a lot between each motor.
+ To harmonize between motors of the same model, different robots, or even models of different brands, we propose to work
+ in the centered nominal degree range ]-180, 180[.
+ """
+ if motor_names is None:
+ motor_names = self.motor_names
+
+ # Convert from unsigned int32 original range [0, 2**32] to signed float32 range
+ values = values.astype(np.float32)
+
+ for i, name in enumerate(motor_names):
+ calib_idx = self.calibration["motor_names"].index(name)
+ calib_mode = self.calibration["calib_mode"][calib_idx]
+
+ if CalibrationMode[calib_mode] == CalibrationMode.DEGREE:
+ drive_mode = self.calibration["drive_mode"][calib_idx]
+ homing_offset = self.calibration["homing_offset"][calib_idx]
+ _, model = self.motors[name]
+ resolution = self.model_resolution[model]
+
+ # Update direction of rotation of the motor to match between leader and follower.
+ # In fact, the motor of the leader for a given joint can be assembled in an
+ # opposite direction in term of rotation than the motor of the follower on the same joint.
+ if drive_mode:
+ values[i] *= -1
+
+ # Convert from range [-2**31, 2**31[ to
+ # nominal range ]-resolution, resolution[ (e.g. ]-2048, 2048[)
+ values[i] += homing_offset
+
+ # Convert from range ]-resolution, resolution[ to
+ # universal float32 centered degree range ]-180, 180[
+ values[i] = values[i] / (resolution // 2) * HALF_TURN_DEGREE
+
+ if (values[i] < LOWER_BOUND_DEGREE) or (values[i] > UPPER_BOUND_DEGREE):
+ raise JointOutOfRangeError(
+ f"Wrong motor position range detected for {name}. "
+ f"Expected to be in nominal range of [-{HALF_TURN_DEGREE}, {HALF_TURN_DEGREE}] degrees (a full rotation), "
+ f"with a maximum range of [{LOWER_BOUND_DEGREE}, {UPPER_BOUND_DEGREE}] degrees to account for joints that can rotate a bit more, "
+ f"but present value is {values[i]} degree. "
+ "This might be due to a cable connection issue creating an artificial 360 degrees jump in motor values. "
+ "You need to recalibrate by running: `python lerobot/scripts/control_robot.py calibrate`"
+ )
+
+ elif CalibrationMode[calib_mode] == CalibrationMode.LINEAR:
+ start_pos = self.calibration["start_pos"][calib_idx]
+ end_pos = self.calibration["end_pos"][calib_idx]
+
+ # Rescale the present position to a nominal range [0, 100] %,
+ # useful for joints with linear motions like Aloha gripper
+ values[i] = (values[i] - start_pos) / (end_pos - start_pos) * 100
+
+ if (values[i] < LOWER_BOUND_LINEAR) or (values[i] > UPPER_BOUND_LINEAR):
+ raise JointOutOfRangeError(
+ f"Wrong motor position range detected for {name}. "
+ f"Expected to be in nominal range of [0, 100] % (a full linear translation), "
+ f"with a maximum range of [{LOWER_BOUND_LINEAR}, {UPPER_BOUND_LINEAR}] % to account for some imprecision during calibration, "
+ f"but present value is {values[i]} %. "
+ "This might be due to a cable connection issue creating an artificial jump in motor values. "
+ "You need to recalibrate by running: `python lerobot/scripts/control_robot.py calibrate`"
+ )
+
+ return values
+
+ def autocorrect_calibration(self, values: np.ndarray | list, motor_names: list[str] | None):
+ """This function automatically detects issues with values of motors after calibration, and correct for these issues.
+
+ Some motors might have values outside of expected maximum bounds after calibration.
+ For instance, for a joint in degree, its value can be outside [-270, 270] degrees, which is totally unexpected given
+ a nominal range of [-180, 180] degrees, which represents half a turn to the left or right starting from zero position.
+
+ Known issues:
+ #1: Motor value randomly shifts of a full turn, caused by hardware/connection errors.
+ #2: Motor internal homing offset is shifted of a full turn, caused by using default calibration (e.g Aloha).
+ #3: motor internal homing offset is shifted of less or more than a full turn, caused by using default calibration
+ or by human error during manual calibration.
+
+ Issues #1 and #2 can be solved by shifting the calibration homing offset by a full turn.
+ Issue #3 will be visually detected by user and potentially captured by the safety feature `max_relative_target`,
+ that will slow down the motor, raise an error asking to recalibrate. Manual recalibrating will solve the issue.
+
+ Note: A full turn corresponds to 360 degrees but also to 4096 steps for a motor resolution of 4096.
+ """
+ if motor_names is None:
+ motor_names = self.motor_names
+
+ # Convert from unsigned int32 original range [0, 2**32] to signed float32 range
+ values = values.astype(np.float32)
+
+ for i, name in enumerate(motor_names):
+ calib_idx = self.calibration["motor_names"].index(name)
+ calib_mode = self.calibration["calib_mode"][calib_idx]
+
+ if CalibrationMode[calib_mode] == CalibrationMode.DEGREE:
+ drive_mode = self.calibration["drive_mode"][calib_idx]
+ homing_offset = self.calibration["homing_offset"][calib_idx]
+ _, model = self.motors[name]
+ resolution = self.model_resolution[model]
+
+ if drive_mode:
+ values[i] *= -1
+
+ # Convert from initial range to range [-180, 180] degrees
+ calib_val = (values[i] + homing_offset) / (resolution // 2) * HALF_TURN_DEGREE
+ in_range = (calib_val > LOWER_BOUND_DEGREE) and (calib_val < UPPER_BOUND_DEGREE)
+
+ # Solve this inequality to find the factor to shift the range into [-180, 180] degrees
+ # values[i] = (values[i] + homing_offset + resolution * factor) / (resolution // 2) * HALF_TURN_DEGREE
+ # - HALF_TURN_DEGREE <= (values[i] + homing_offset + resolution * factor) / (resolution // 2) * HALF_TURN_DEGREE <= HALF_TURN_DEGREE
+ # (- HALF_TURN_DEGREE / HALF_TURN_DEGREE * (resolution // 2) - values[i] - homing_offset) / resolution <= factor <= (HALF_TURN_DEGREE / 180 * (resolution // 2) - values[i] - homing_offset) / resolution
+ low_factor = (
+ -HALF_TURN_DEGREE / HALF_TURN_DEGREE * (resolution // 2) - values[i] - homing_offset
+ ) / resolution
+ upp_factor = (
+ HALF_TURN_DEGREE / HALF_TURN_DEGREE * (resolution // 2) - values[i] - homing_offset
+ ) / resolution
+
+ elif CalibrationMode[calib_mode] == CalibrationMode.LINEAR:
+ start_pos = self.calibration["start_pos"][calib_idx]
+ end_pos = self.calibration["end_pos"][calib_idx]
+
+ # Convert from initial range to range [0, 100] in %
+ calib_val = (values[i] - start_pos) / (end_pos - start_pos) * 100
+ in_range = (calib_val > LOWER_BOUND_LINEAR) and (calib_val < UPPER_BOUND_LINEAR)
+
+ # Solve this inequality to find the factor to shift the range into [0, 100] %
+ # values[i] = (values[i] - start_pos + resolution * factor) / (end_pos + resolution * factor - start_pos - resolution * factor) * 100
+ # values[i] = (values[i] - start_pos + resolution * factor) / (end_pos - start_pos) * 100
+ # 0 <= (values[i] - start_pos + resolution * factor) / (end_pos - start_pos) * 100 <= 100
+ # (start_pos - values[i]) / resolution <= factor <= (end_pos - values[i]) / resolution
+ low_factor = (start_pos - values[i]) / resolution
+ upp_factor = (end_pos - values[i]) / resolution
+
+ if not in_range:
+ # Get first integer between the two bounds
+ if low_factor < upp_factor:
+ factor = math.ceil(low_factor)
+
+ if factor > upp_factor:
+ raise ValueError(f"No integer found between bounds [{low_factor=}, {upp_factor=}]")
+ else:
+ factor = math.ceil(upp_factor)
+
+ if factor > low_factor:
+ raise ValueError(f"No integer found between bounds [{low_factor=}, {upp_factor=}]")
+
+ if CalibrationMode[calib_mode] == CalibrationMode.DEGREE:
+ out_of_range_str = f"{LOWER_BOUND_DEGREE} < {calib_val} < {UPPER_BOUND_DEGREE} degrees"
+ in_range_str = f"{LOWER_BOUND_DEGREE} < {calib_val} < {UPPER_BOUND_DEGREE} degrees"
+ elif CalibrationMode[calib_mode] == CalibrationMode.LINEAR:
+ out_of_range_str = f"{LOWER_BOUND_LINEAR} < {calib_val} < {UPPER_BOUND_LINEAR} %"
+ in_range_str = f"{LOWER_BOUND_LINEAR} < {calib_val} < {UPPER_BOUND_LINEAR} %"
+
+ logging.warning(
+ f"Auto-correct calibration of motor '{name}' by shifting value by {abs(factor)} full turns, "
+ f"from '{out_of_range_str}' to '{in_range_str}'."
+ )
+
+ # A full turn corresponds to 360 degrees but also to 4096 steps for a motor resolution of 4096.
+ self.calibration["homing_offset"][calib_idx] += resolution * factor
+
+ def revert_calibration(self, values: np.ndarray | list, motor_names: list[str] | None):
+ """Inverse of `apply_calibration`."""
+ if motor_names is None:
+ motor_names = self.motor_names
+
+ for i, name in enumerate(motor_names):
+ calib_idx = self.calibration["motor_names"].index(name)
+ calib_mode = self.calibration["calib_mode"][calib_idx]
+
+ if CalibrationMode[calib_mode] == CalibrationMode.DEGREE:
+ drive_mode = self.calibration["drive_mode"][calib_idx]
+ homing_offset = self.calibration["homing_offset"][calib_idx]
+ _, model = self.motors[name]
+ resolution = self.model_resolution[model]
+
+ # Convert from nominal 0-centered degree range [-180, 180] to
+ # 0-centered resolution range (e.g. [-2048, 2048] for resolution=4096)
+ values[i] = values[i] / HALF_TURN_DEGREE * (resolution // 2)
+
+ # Substract the homing offsets to come back to actual motor range of values
+ # which can be arbitrary.
+ values[i] -= homing_offset
+
+ # Remove drive mode, which is the rotation direction of the motor, to come back to
+ # actual motor rotation direction which can be arbitrary.
+ if drive_mode:
+ values[i] *= -1
+
+ elif CalibrationMode[calib_mode] == CalibrationMode.LINEAR:
+ start_pos = self.calibration["start_pos"][calib_idx]
+ end_pos = self.calibration["end_pos"][calib_idx]
+
+ # Convert from nominal lnear range of [0, 100] % to
+ # actual motor range of values which can be arbitrary.
+ values[i] = values[i] / 100 * (end_pos - start_pos) + start_pos
+
+ values = np.round(values).astype(np.int32)
+ return values
+
+ def avoid_rotation_reset(self, values, motor_names, data_name):
+ if data_name not in self.track_positions:
+ self.track_positions[data_name] = {
+ "prev": [None] * len(self.motor_names),
+ # Assume False at initialization
+ "below_zero": [False] * len(self.motor_names),
+ "above_max": [False] * len(self.motor_names),
+ }
+
+ track = self.track_positions[data_name]
+
+ if motor_names is None:
+ motor_names = self.motor_names
+
+ for i, name in enumerate(motor_names):
+ idx = self.motor_names.index(name)
+
+ if track["prev"][idx] is None:
+ track["prev"][idx] = values[i]
+ continue
+
+ # Detect a full rotation occured
+ if abs(track["prev"][idx] - values[i]) > 2048:
+ # Position went below 0 and got reset to 4095
+ if track["prev"][idx] < values[i]:
+ # So we set negative value by adding a full rotation
+ values[i] -= 4096
+
+ # Position went above 4095 and got reset to 0
+ elif track["prev"][idx] > values[i]:
+ # So we add a full rotation
+ values[i] += 4096
+
+ track["prev"][idx] = values[i]
+
+ return values
+
+ def read_with_motor_ids(self, motor_models, motor_ids, data_name, num_retry=NUM_READ_RETRY):
+ if self.mock:
+ import tests.mock_scservo_sdk as scs
+ else:
+ import scservo_sdk as scs
+
+ return_list = True
+ if not isinstance(motor_ids, list):
+ return_list = False
+ motor_ids = [motor_ids]
+
+ assert_same_address(self.model_ctrl_table, self.motor_models, data_name)
+ addr, bytes = self.model_ctrl_table[motor_models[0]][data_name]
+ group = scs.GroupSyncRead(self.port_handler, self.packet_handler, addr, bytes)
+ for idx in motor_ids:
+ group.addParam(idx)
+
+ for _ in range(num_retry):
+ comm = group.txRxPacket()
+ if comm == scs.COMM_SUCCESS:
+ break
+
+ if comm != scs.COMM_SUCCESS:
+ raise ConnectionError(
+ f"Read failed due to communication error on port {self.port_handler.port_name} for indices {motor_ids}: "
+ f"{self.packet_handler.getTxRxResult(comm)}"
+ )
+
+ values = []
+ for idx in motor_ids:
+ value = group.getData(idx, addr, bytes)
+ values.append(value)
+
+ if return_list:
+ return values
+ else:
+ return values[0]
+
+ def read(self, data_name, motor_names: str | list[str] | None = None):
+ if self.mock:
+ import tests.mock_scservo_sdk as scs
+ else:
+ import scservo_sdk as scs
+
+ if not self.is_connected:
+ raise RobotDeviceNotConnectedError(
+ f"FeetechMotorsBus({self.port}) is not connected. You need to run `motors_bus.connect()`."
+ )
+
+ start_time = time.perf_counter()
+
+ if motor_names is None:
+ motor_names = self.motor_names
+
+ if isinstance(motor_names, str):
+ motor_names = [motor_names]
+
+ motor_ids = []
+ models = []
+ for name in motor_names:
+ motor_idx, model = self.motors[name]
+ motor_ids.append(motor_idx)
+ models.append(model)
+
+ assert_same_address(self.model_ctrl_table, models, data_name)
+ addr, bytes = self.model_ctrl_table[model][data_name]
+ group_key = get_group_sync_key(data_name, motor_names)
+
+ if data_name not in self.group_readers:
+ # create new group reader
+ self.group_readers[group_key] = scs.GroupSyncRead(
+ self.port_handler, self.packet_handler, addr, bytes
+ )
+ for idx in motor_ids:
+ self.group_readers[group_key].addParam(idx)
+
+ for _ in range(NUM_READ_RETRY):
+ comm = self.group_readers[group_key].txRxPacket()
+ if comm == scs.COMM_SUCCESS:
+ break
+
+ if comm != scs.COMM_SUCCESS:
+ raise ConnectionError(
+ f"Read failed due to communication error on port {self.port} for group_key {group_key}: "
+ f"{self.packet_handler.getTxRxResult(comm)}"
+ )
+
+ values = []
+ for idx in motor_ids:
+ value = self.group_readers[group_key].getData(idx, addr, bytes)
+ values.append(value)
+
+ values = np.array(values)
+
+ # Convert to signed int to use range [-2048, 2048] for our motor positions.
+ if data_name in CONVERT_UINT32_TO_INT32_REQUIRED:
+ values = values.astype(np.int32)
+
+ if data_name in CALIBRATION_REQUIRED:
+ values = self.avoid_rotation_reset(values, motor_names, data_name)
+
+ if data_name in CALIBRATION_REQUIRED and self.calibration is not None:
+ values = self.apply_calibration_autocorrect(values, motor_names)
+
+ # log the number of seconds it took to read the data from the motors
+ delta_ts_name = get_log_name("delta_timestamp_s", "read", data_name, motor_names)
+ self.logs[delta_ts_name] = time.perf_counter() - start_time
+
+ # log the utc time at which the data was received
+ ts_utc_name = get_log_name("timestamp_utc", "read", data_name, motor_names)
+ self.logs[ts_utc_name] = capture_timestamp_utc()
+
+ return values
+
+ def write_with_motor_ids(self, motor_models, motor_ids, data_name, values, num_retry=NUM_WRITE_RETRY):
+ if self.mock:
+ import tests.mock_scservo_sdk as scs
+ else:
+ import scservo_sdk as scs
+
+ if not isinstance(motor_ids, list):
+ motor_ids = [motor_ids]
+ if not isinstance(values, list):
+ values = [values]
+
+ assert_same_address(self.model_ctrl_table, motor_models, data_name)
+ addr, bytes = self.model_ctrl_table[motor_models[0]][data_name]
+ group = scs.GroupSyncWrite(self.port_handler, self.packet_handler, addr, bytes)
+ for idx, value in zip(motor_ids, values, strict=True):
+ data = convert_to_bytes(value, bytes, self.mock)
+ group.addParam(idx, data)
+
+ for _ in range(num_retry):
+ comm = group.txPacket()
+ if comm == scs.COMM_SUCCESS:
+ break
+
+ if comm != scs.COMM_SUCCESS:
+ raise ConnectionError(
+ f"Write failed due to communication error on port {self.port_handler.port_name} for indices {motor_ids}: "
+ f"{self.packet_handler.getTxRxResult(comm)}"
+ )
+
+ def write(self, data_name, values: int | float | np.ndarray, motor_names: str | list[str] | None = None):
+ if not self.is_connected:
+ raise RobotDeviceNotConnectedError(
+ f"FeetechMotorsBus({self.port}) is not connected. You need to run `motors_bus.connect()`."
+ )
+
+ start_time = time.perf_counter()
+
+ if self.mock:
+ import tests.mock_scservo_sdk as scs
+ else:
+ import scservo_sdk as scs
+
+ if motor_names is None:
+ motor_names = self.motor_names
+
+ if isinstance(motor_names, str):
+ motor_names = [motor_names]
+
+ if isinstance(values, (int, float, np.integer)):
+ values = [int(values)] * len(motor_names)
+
+ values = np.array(values)
+
+ motor_ids = []
+ models = []
+ for name in motor_names:
+ motor_idx, model = self.motors[name]
+ motor_ids.append(motor_idx)
+ models.append(model)
+
+ if data_name in CALIBRATION_REQUIRED and self.calibration is not None:
+ values = self.revert_calibration(values, motor_names)
+
+ values = values.tolist()
+
+ assert_same_address(self.model_ctrl_table, models, data_name)
+ addr, bytes = self.model_ctrl_table[model][data_name]
+ group_key = get_group_sync_key(data_name, motor_names)
+
+ init_group = data_name not in self.group_readers
+ if init_group:
+ self.group_writers[group_key] = scs.GroupSyncWrite(
+ self.port_handler, self.packet_handler, addr, bytes
+ )
+
+ for idx, value in zip(motor_ids, values, strict=True):
+ data = convert_to_bytes(value, bytes, self.mock)
+ if init_group:
+ self.group_writers[group_key].addParam(idx, data)
+ else:
+ self.group_writers[group_key].changeParam(idx, data)
+
+ comm = self.group_writers[group_key].txPacket()
+ if comm != scs.COMM_SUCCESS:
+ raise ConnectionError(
+ f"Write failed due to communication error on port {self.port} for group_key {group_key}: "
+ f"{self.packet_handler.getTxRxResult(comm)}"
+ )
+
+ # log the number of seconds it took to write the data to the motors
+ delta_ts_name = get_log_name("delta_timestamp_s", "write", data_name, motor_names)
+ self.logs[delta_ts_name] = time.perf_counter() - start_time
+
+ # TODO(rcadene): should we log the time before sending the write command?
+ # log the utc time when the write has been completed
+ ts_utc_name = get_log_name("timestamp_utc", "write", data_name, motor_names)
+ self.logs[ts_utc_name] = capture_timestamp_utc()
+
+ def disconnect(self):
+ if not self.is_connected:
+ raise RobotDeviceNotConnectedError(
+ f"FeetechMotorsBus({self.port}) is not connected. Try running `motors_bus.connect()` first."
+ )
+
+ if self.port_handler is not None:
+ self.port_handler.closePort()
+ self.port_handler = None
+
+ self.packet_handler = None
+ self.group_readers = {}
+ self.group_writers = {}
+ self.is_connected = False
+
+ def __del__(self):
+ if getattr(self, "is_connected", False):
+ self.disconnect()
diff --git a/lerobot/common/robot_devices/robots/dynamixel_calibration.py b/lerobot/common/robot_devices/robots/dynamixel_calibration.py
new file mode 100644
index 000000000..5c4932d2e
--- /dev/null
+++ b/lerobot/common/robot_devices/robots/dynamixel_calibration.py
@@ -0,0 +1,130 @@
+"""Logic to calibrate a robot arm built with dynamixel motors"""
+# TODO(rcadene, aliberts): move this logic into the robot code when refactoring
+
+import numpy as np
+
+from lerobot.common.robot_devices.motors.dynamixel import (
+ CalibrationMode,
+ TorqueMode,
+ convert_degrees_to_steps,
+)
+from lerobot.common.robot_devices.motors.utils import MotorsBus
+
+URL_TEMPLATE = (
+ "https://raw.githubusercontent.com/huggingface/lerobot/main/media/{robot}/{arm}_{position}.webp"
+)
+
+# The following positions are provided in nominal degree range ]-180, +180[
+# For more info on these constants, see comments in the code where they get used.
+ZERO_POSITION_DEGREE = 0
+ROTATED_POSITION_DEGREE = 90
+
+
+def assert_drive_mode(drive_mode):
+ # `drive_mode` is in [0,1] with 0 means original rotation direction for the motor, and 1 means inverted.
+ if not np.all(np.isin(drive_mode, [0, 1])):
+ raise ValueError(f"`drive_mode` contains values other than 0 or 1: ({drive_mode})")
+
+
+def apply_drive_mode(position, drive_mode):
+ assert_drive_mode(drive_mode)
+ # Convert `drive_mode` from [0, 1] with 0 indicates original rotation direction and 1 inverted,
+ # to [-1, 1] with 1 indicates original rotation direction and -1 inverted.
+ signed_drive_mode = -(drive_mode * 2 - 1)
+ position *= signed_drive_mode
+ return position
+
+
+def compute_nearest_rounded_position(position, models):
+ delta_turn = convert_degrees_to_steps(ROTATED_POSITION_DEGREE, models)
+ nearest_pos = np.round(position.astype(float) / delta_turn) * delta_turn
+ return nearest_pos.astype(position.dtype)
+
+
+def run_arm_calibration(arm: MotorsBus, robot_type: str, arm_name: str, arm_type: str):
+ """This function ensures that a neural network trained on data collected on a given robot
+ can work on another robot. For instance before calibration, setting a same goal position
+ for each motor of two different robots will get two very different positions. But after calibration,
+ the two robots will move to the same position.To this end, this function computes the homing offset
+ and the drive mode for each motor of a given robot.
+
+ Homing offset is used to shift the motor position to a ]-2048, +2048[ nominal range (when the motor uses 2048 steps
+ to complete a half a turn). This range is set around an arbitrary "zero position" corresponding to all motor positions
+ being 0. During the calibration process, you will need to manually move the robot to this "zero position".
+
+ Drive mode is used to invert the rotation direction of the motor. This is useful when some motors have been assembled
+ in the opposite orientation for some robots. During the calibration process, you will need to manually move the robot
+ to the "rotated position".
+
+ After calibration, the homing offsets and drive modes are stored in a cache.
+
+ Example of usage:
+ ```python
+ run_arm_calibration(arm, "koch", "left", "follower")
+ ```
+ """
+ if (arm.read("Torque_Enable") != TorqueMode.DISABLED.value).any():
+ raise ValueError("To run calibration, the torque must be disabled on all motors.")
+
+ print(f"\nRunning calibration of {robot_type} {arm_name} {arm_type}...")
+
+ print("\nMove arm to zero position")
+ print("See: " + URL_TEMPLATE.format(robot=robot_type, arm=arm_type, position="zero"))
+ input("Press Enter to continue...")
+
+ # We arbitrarily chose our zero target position to be a straight horizontal position with gripper upwards and closed.
+ # It is easy to identify and all motors are in a "quarter turn" position. Once calibration is done, this position will
+ # correspond to every motor angle being 0. If you set all 0 as Goal Position, the arm will move in this position.
+ zero_target_pos = convert_degrees_to_steps(ZERO_POSITION_DEGREE, arm.motor_models)
+
+ # Compute homing offset so that `present_position + homing_offset ~= target_position`.
+ zero_pos = arm.read("Present_Position")
+ zero_nearest_pos = compute_nearest_rounded_position(zero_pos, arm.motor_models)
+ homing_offset = zero_target_pos - zero_nearest_pos
+
+ # The rotated target position corresponds to a rotation of a quarter turn from the zero position.
+ # This allows to identify the rotation direction of each motor.
+ # For instance, if the motor rotates 90 degree, and its value is -90 after applying the homing offset, then we know its rotation direction
+ # is inverted. However, for the calibration being successful, we need everyone to follow the same target position.
+ # Sometimes, there is only one possible rotation direction. For instance, if the gripper is closed, there is only one direction which
+ # corresponds to opening the gripper. When the rotation direction is ambiguous, we arbitrarely rotate clockwise from the point of view
+ # of the previous motor in the kinetic chain.
+ print("\nMove arm to rotated target position")
+ print("See: " + URL_TEMPLATE.format(robot=robot_type, arm=arm_type, position="rotated"))
+ input("Press Enter to continue...")
+
+ rotated_target_pos = convert_degrees_to_steps(ROTATED_POSITION_DEGREE, arm.motor_models)
+
+ # Find drive mode by rotating each motor by a quarter of a turn.
+ # Drive mode indicates if the motor rotation direction should be inverted (=1) or not (=0).
+ rotated_pos = arm.read("Present_Position")
+ drive_mode = (rotated_pos < zero_pos).astype(np.int32)
+
+ # Re-compute homing offset to take into account drive mode
+ rotated_drived_pos = apply_drive_mode(rotated_pos, drive_mode)
+ rotated_nearest_pos = compute_nearest_rounded_position(rotated_drived_pos, arm.motor_models)
+ homing_offset = rotated_target_pos - rotated_nearest_pos
+
+ print("\nMove arm to rest position")
+ print("See: " + URL_TEMPLATE.format(robot=robot_type, arm=arm_type, position="rest"))
+ input("Press Enter to continue...")
+ print()
+
+ # Joints with rotational motions are expressed in degrees in nominal range of [-180, 180]
+ calib_mode = [CalibrationMode.DEGREE.name] * len(arm.motor_names)
+
+ # TODO(rcadene): make type of joints (DEGREE or LINEAR) configurable from yaml?
+ if robot_type in ["aloha"] and "gripper" in arm.motor_names:
+ # Joints with linear motions (like gripper of Aloha) are experessed in nominal range of [0, 100]
+ calib_idx = arm.motor_names.index("gripper")
+ calib_mode[calib_idx] = CalibrationMode.LINEAR.name
+
+ calib_data = {
+ "homing_offset": homing_offset.tolist(),
+ "drive_mode": drive_mode.tolist(),
+ "start_pos": zero_pos.tolist(),
+ "end_pos": rotated_pos.tolist(),
+ "calib_mode": calib_mode,
+ "motor_names": arm.motor_names,
+ }
+ return calib_data
diff --git a/lerobot/common/robot_devices/robots/factory.py b/lerobot/common/robot_devices/robots/factory.py
index 2edcd2925..17e8e5e6a 100644
--- a/lerobot/common/robot_devices/robots/factory.py
+++ b/lerobot/common/robot_devices/robots/factory.py
@@ -1,7 +1,9 @@
import hydra
from omegaconf import DictConfig
+from lerobot.common.robot_devices.robots.utils import Robot
-def make_robot(cfg: DictConfig):
+
+def make_robot(cfg: DictConfig) -> Robot:
robot = hydra.utils.instantiate(cfg)
return robot
diff --git a/lerobot/common/robot_devices/robots/feetech_calibration.py b/lerobot/common/robot_devices/robots/feetech_calibration.py
new file mode 100644
index 000000000..b015951a0
--- /dev/null
+++ b/lerobot/common/robot_devices/robots/feetech_calibration.py
@@ -0,0 +1,484 @@
+"""Logic to calibrate a robot arm built with feetech motors"""
+# TODO(rcadene, aliberts): move this logic into the robot code when refactoring
+
+import time
+
+import numpy as np
+
+from lerobot.common.robot_devices.motors.feetech import (
+ CalibrationMode,
+ TorqueMode,
+ convert_degrees_to_steps,
+)
+from lerobot.common.robot_devices.motors.utils import MotorsBus
+
+URL_TEMPLATE = (
+ "https://raw.githubusercontent.com/huggingface/lerobot/main/media/{robot}/{arm}_{position}.webp"
+)
+
+# The following positions are provided in nominal degree range ]-180, +180[
+# For more info on these constants, see comments in the code where they get used.
+ZERO_POSITION_DEGREE = 0
+ROTATED_POSITION_DEGREE = 90
+
+
+def assert_drive_mode(drive_mode):
+ # `drive_mode` is in [0,1] with 0 means original rotation direction for the motor, and 1 means inverted.
+ if not np.all(np.isin(drive_mode, [0, 1])):
+ raise ValueError(f"`drive_mode` contains values other than 0 or 1: ({drive_mode})")
+
+
+def apply_drive_mode(position, drive_mode):
+ assert_drive_mode(drive_mode)
+ # Convert `drive_mode` from [0, 1] with 0 indicates original rotation direction and 1 inverted,
+ # to [-1, 1] with 1 indicates original rotation direction and -1 inverted.
+ signed_drive_mode = -(drive_mode * 2 - 1)
+ position *= signed_drive_mode
+ return position
+
+
+def move_until_block(arm, motor_name, positive_direction=True, while_move_hook=None):
+ count = 0
+ while True:
+ present_pos = arm.read("Present_Position", motor_name)
+ if positive_direction:
+ # Move +100 steps every time. Lower the steps to lower the speed at which the arm moves.
+ arm.write("Goal_Position", present_pos + 100, motor_name)
+ else:
+ arm.write("Goal_Position", present_pos - 100, motor_name)
+
+ if while_move_hook is not None:
+ while_move_hook()
+
+ present_pos = arm.read("Present_Position", motor_name).item()
+ present_speed = arm.read("Present_Speed", motor_name).item()
+ present_current = arm.read("Present_Current", motor_name).item()
+ # present_load = arm.read("Present_Load", motor_name).item()
+ # present_voltage = arm.read("Present_Voltage", motor_name).item()
+ # present_temperature = arm.read("Present_Temperature", motor_name).item()
+
+ # print(f"{present_pos=}")
+ # print(f"{present_speed=}")
+ # print(f"{present_current=}")
+ # print(f"{present_load=}")
+ # print(f"{present_voltage=}")
+ # print(f"{present_temperature=}")
+
+ if present_speed == 0 and present_current > 40:
+ count += 1
+ if count > 100 or present_current > 300:
+ return present_pos
+ else:
+ count = 0
+
+
+def move_to_calibrate(
+ arm,
+ motor_name,
+ invert_drive_mode=False,
+ positive_first=True,
+ in_between_move_hook=None,
+ while_move_hook=None,
+):
+ initial_pos = arm.read("Present_Position", motor_name)
+
+ if positive_first:
+ p_present_pos = move_until_block(
+ arm, motor_name, positive_direction=True, while_move_hook=while_move_hook
+ )
+ else:
+ n_present_pos = move_until_block(
+ arm, motor_name, positive_direction=False, while_move_hook=while_move_hook
+ )
+
+ if in_between_move_hook is not None:
+ in_between_move_hook()
+
+ if positive_first:
+ n_present_pos = move_until_block(
+ arm, motor_name, positive_direction=False, while_move_hook=while_move_hook
+ )
+ else:
+ p_present_pos = move_until_block(
+ arm, motor_name, positive_direction=True, while_move_hook=while_move_hook
+ )
+
+ zero_pos = (n_present_pos + p_present_pos) / 2
+
+ calib_data = {
+ "initial_pos": initial_pos,
+ "homing_offset": zero_pos if invert_drive_mode else -zero_pos,
+ "invert_drive_mode": invert_drive_mode,
+ "drive_mode": -1 if invert_drive_mode else 0,
+ "zero_pos": zero_pos,
+ "start_pos": n_present_pos if invert_drive_mode else p_present_pos,
+ "end_pos": p_present_pos if invert_drive_mode else n_present_pos,
+ }
+ return calib_data
+
+
+def apply_offset(calib, offset):
+ calib["zero_pos"] += offset
+ if calib["drive_mode"]:
+ calib["homing_offset"] += offset
+ else:
+ calib["homing_offset"] -= offset
+ return calib
+
+
+def run_arm_auto_calibration(arm: MotorsBus, robot_type: str, arm_name: str, arm_type: str):
+ if robot_type == "so100":
+ return run_arm_auto_calibration_so100(arm, robot_type, arm_name, arm_type)
+ elif robot_type == "moss":
+ return run_arm_auto_calibration_moss(arm, robot_type, arm_name, arm_type)
+ else:
+ raise ValueError(robot_type)
+
+
+def run_arm_auto_calibration_so100(arm: MotorsBus, robot_type: str, arm_name: str, arm_type: str):
+ """All the offsets and magic numbers are hand tuned, and are unique to SO-100 follower arms"""
+ if (arm.read("Torque_Enable") != TorqueMode.DISABLED.value).any():
+ raise ValueError("To run calibration, the torque must be disabled on all motors.")
+
+ if not (robot_type == "so100" and arm_type == "follower"):
+ raise NotImplementedError("Auto calibration only supports the follower of so100 arms for now.")
+
+ print(f"\nRunning calibration of {robot_type} {arm_name} {arm_type}...")
+
+ print("\nMove arm to initial position")
+ print("See: " + URL_TEMPLATE.format(robot=robot_type, arm=arm_type, position="initial"))
+ input("Press Enter to continue...")
+
+ # Lower the acceleration of the motors (in [0,254])
+ initial_acceleration = arm.read("Acceleration")
+ arm.write("Lock", 0)
+ arm.write("Acceleration", 10)
+ time.sleep(1)
+
+ arm.write("Torque_Enable", TorqueMode.ENABLED.value)
+
+ print(f'{arm.read("Present_Position", "elbow_flex")=}')
+
+ calib = {}
+
+ init_wf_pos = arm.read("Present_Position", "wrist_flex")
+ init_sl_pos = arm.read("Present_Position", "shoulder_lift")
+ init_ef_pos = arm.read("Present_Position", "elbow_flex")
+ arm.write("Goal_Position", init_wf_pos - 800, "wrist_flex")
+ arm.write("Goal_Position", init_sl_pos + 150 + 1024, "shoulder_lift")
+ arm.write("Goal_Position", init_ef_pos - 2048, "elbow_flex")
+ time.sleep(2)
+
+ print("Calibrate shoulder_pan")
+ calib["shoulder_pan"] = move_to_calibrate(arm, "shoulder_pan")
+ arm.write("Goal_Position", calib["shoulder_pan"]["zero_pos"], "shoulder_pan")
+ time.sleep(1)
+
+ print("Calibrate gripper")
+ calib["gripper"] = move_to_calibrate(arm, "gripper", invert_drive_mode=True)
+ time.sleep(1)
+
+ print("Calibrate wrist_flex")
+ calib["wrist_flex"] = move_to_calibrate(arm, "wrist_flex")
+ calib["wrist_flex"] = apply_offset(calib["wrist_flex"], offset=80)
+
+ def in_between_move_hook():
+ nonlocal arm, calib
+ time.sleep(2)
+ ef_pos = arm.read("Present_Position", "elbow_flex")
+ sl_pos = arm.read("Present_Position", "shoulder_lift")
+ arm.write("Goal_Position", ef_pos + 1024, "elbow_flex")
+ arm.write("Goal_Position", sl_pos - 1024, "shoulder_lift")
+ time.sleep(2)
+
+ print("Calibrate elbow_flex")
+ calib["elbow_flex"] = move_to_calibrate(
+ arm, "elbow_flex", positive_first=False, in_between_move_hook=in_between_move_hook
+ )
+ calib["elbow_flex"] = apply_offset(calib["elbow_flex"], offset=80 - 1024)
+
+ arm.write("Goal_Position", calib["elbow_flex"]["zero_pos"] + 1024 + 512, "elbow_flex")
+ time.sleep(1)
+
+ def in_between_move_hook():
+ nonlocal arm, calib
+ arm.write("Goal_Position", calib["elbow_flex"]["zero_pos"], "elbow_flex")
+
+ print("Calibrate shoulder_lift")
+ calib["shoulder_lift"] = move_to_calibrate(
+ arm,
+ "shoulder_lift",
+ invert_drive_mode=True,
+ positive_first=False,
+ in_between_move_hook=in_between_move_hook,
+ )
+ # add an 30 steps as offset to align with body
+ calib["shoulder_lift"] = apply_offset(calib["shoulder_lift"], offset=1024 - 50)
+
+ def while_move_hook():
+ nonlocal arm, calib
+ positions = {
+ "shoulder_lift": round(calib["shoulder_lift"]["zero_pos"] - 1600),
+ "elbow_flex": round(calib["elbow_flex"]["zero_pos"] + 1700),
+ "wrist_flex": round(calib["wrist_flex"]["zero_pos"] + 800),
+ "gripper": round(calib["gripper"]["end_pos"]),
+ }
+ arm.write("Goal_Position", list(positions.values()), list(positions.keys()))
+
+ arm.write("Goal_Position", round(calib["shoulder_lift"]["zero_pos"] - 1600), "shoulder_lift")
+ time.sleep(2)
+ arm.write("Goal_Position", round(calib["elbow_flex"]["zero_pos"] + 1700), "elbow_flex")
+ time.sleep(2)
+ arm.write("Goal_Position", round(calib["wrist_flex"]["zero_pos"] + 800), "wrist_flex")
+ time.sleep(2)
+ arm.write("Goal_Position", round(calib["gripper"]["end_pos"]), "gripper")
+ time.sleep(2)
+
+ print("Calibrate wrist_roll")
+ calib["wrist_roll"] = move_to_calibrate(
+ arm, "wrist_roll", invert_drive_mode=True, positive_first=False, while_move_hook=while_move_hook
+ )
+
+ arm.write("Goal_Position", calib["wrist_roll"]["zero_pos"], "wrist_roll")
+ time.sleep(1)
+ arm.write("Goal_Position", calib["gripper"]["start_pos"], "gripper")
+ time.sleep(1)
+ arm.write("Goal_Position", calib["wrist_flex"]["zero_pos"], "wrist_flex")
+ time.sleep(1)
+ arm.write("Goal_Position", calib["elbow_flex"]["zero_pos"] + 2048, "elbow_flex")
+ arm.write("Goal_Position", calib["shoulder_lift"]["zero_pos"] - 2048, "shoulder_lift")
+ time.sleep(1)
+ arm.write("Goal_Position", calib["shoulder_pan"]["zero_pos"], "shoulder_pan")
+ time.sleep(1)
+
+ calib_modes = []
+ for name in arm.motor_names:
+ if name == "gripper":
+ calib_modes.append(CalibrationMode.LINEAR.name)
+ else:
+ calib_modes.append(CalibrationMode.DEGREE.name)
+
+ calib_dict = {
+ "homing_offset": [calib[name]["homing_offset"] for name in arm.motor_names],
+ "drive_mode": [calib[name]["drive_mode"] for name in arm.motor_names],
+ "start_pos": [calib[name]["start_pos"] for name in arm.motor_names],
+ "end_pos": [calib[name]["end_pos"] for name in arm.motor_names],
+ "calib_mode": calib_modes,
+ "motor_names": arm.motor_names,
+ }
+
+ # Re-enable original accerlation
+ arm.write("Lock", 0)
+ arm.write("Acceleration", initial_acceleration)
+ time.sleep(1)
+
+ return calib_dict
+
+
+def run_arm_auto_calibration_moss(arm: MotorsBus, robot_type: str, arm_name: str, arm_type: str):
+ """All the offsets and magic numbers are hand tuned, and are unique to SO-100 follower arms"""
+ if (arm.read("Torque_Enable") != TorqueMode.DISABLED.value).any():
+ raise ValueError("To run calibration, the torque must be disabled on all motors.")
+
+ if not (robot_type == "moss" and arm_type == "follower"):
+ raise NotImplementedError("Auto calibration only supports the follower of moss arms for now.")
+
+ print(f"\nRunning calibration of {robot_type} {arm_name} {arm_type}...")
+
+ print("\nMove arm to initial position")
+ print("See: " + URL_TEMPLATE.format(robot=robot_type, arm=arm_type, position="initial"))
+ input("Press Enter to continue...")
+
+ # Lower the acceleration of the motors (in [0,254])
+ initial_acceleration = arm.read("Acceleration")
+ arm.write("Lock", 0)
+ arm.write("Acceleration", 10)
+ time.sleep(1)
+
+ arm.write("Torque_Enable", TorqueMode.ENABLED.value)
+
+ sl_pos = arm.read("Present_Position", "shoulder_lift")
+ arm.write("Goal_Position", sl_pos - 1024 - 450, "shoulder_lift")
+ ef_pos = arm.read("Present_Position", "elbow_flex")
+ arm.write("Goal_Position", ef_pos + 1024 + 450, "elbow_flex")
+ time.sleep(2)
+
+ calib = {}
+
+ print("Calibrate shoulder_pan")
+ calib["shoulder_pan"] = move_to_calibrate(arm, "shoulder_pan")
+ arm.write("Goal_Position", calib["shoulder_pan"]["zero_pos"], "shoulder_pan")
+ time.sleep(1)
+
+ print("Calibrate gripper")
+ calib["gripper"] = move_to_calibrate(arm, "gripper", invert_drive_mode=True)
+ time.sleep(1)
+
+ print("Calibrate wrist_flex")
+ calib["wrist_flex"] = move_to_calibrate(arm, "wrist_flex", invert_drive_mode=True)
+ calib["wrist_flex"] = apply_offset(calib["wrist_flex"], offset=-210 + 1024)
+
+ wr_pos = arm.read("Present_Position", "wrist_roll")
+ arm.write("Goal_Position", calib["wrist_flex"]["zero_pos"] - 1024, "wrist_flex")
+ time.sleep(1)
+ arm.write("Goal_Position", wr_pos - 1024, "wrist_roll")
+ time.sleep(1)
+ arm.write("Goal_Position", calib["wrist_flex"]["zero_pos"] - 2048, "wrist_flex")
+ time.sleep(1)
+ arm.write("Goal_Position", calib["gripper"]["end_pos"], "gripper")
+ time.sleep(1)
+
+ print("Calibrate wrist_roll")
+ calib["wrist_roll"] = move_to_calibrate(arm, "wrist_roll", invert_drive_mode=True)
+ calib["wrist_roll"] = apply_offset(calib["wrist_roll"], offset=790)
+
+ arm.write("Goal_Position", calib["wrist_roll"]["zero_pos"] - 1024, "wrist_roll")
+ arm.write("Goal_Position", calib["gripper"]["start_pos"], "gripper")
+ arm.write("Goal_Position", calib["wrist_flex"]["zero_pos"] - 1024, "wrist_flex")
+ time.sleep(1)
+ arm.write("Goal_Position", calib["wrist_roll"]["zero_pos"], "wrist_roll")
+ arm.write("Goal_Position", calib["wrist_flex"]["zero_pos"] - 2048, "wrist_flex")
+
+ def in_between_move_elbow_flex_hook():
+ nonlocal arm, calib
+ arm.write("Goal_Position", calib["wrist_flex"]["zero_pos"], "wrist_flex")
+
+ print("Calibrate elbow_flex")
+ calib["elbow_flex"] = move_to_calibrate(
+ arm,
+ "elbow_flex",
+ invert_drive_mode=True,
+ in_between_move_hook=in_between_move_elbow_flex_hook,
+ )
+ arm.write("Goal_Position", calib["wrist_flex"]["zero_pos"] - 1024, "wrist_flex")
+
+ def in_between_move_shoulder_lift_hook():
+ nonlocal arm, calib
+ sl = arm.read("Present_Position", "shoulder_lift")
+ arm.write("Goal_Position", sl - 1500, "shoulder_lift")
+ time.sleep(1)
+ arm.write("Goal_Position", calib["elbow_flex"]["zero_pos"] + 1536, "elbow_flex")
+ time.sleep(1)
+ arm.write("Goal_Position", calib["wrist_flex"]["start_pos"], "wrist_flex")
+ time.sleep(1)
+
+ print("Calibrate shoulder_lift")
+ calib["shoulder_lift"] = move_to_calibrate(
+ arm, "shoulder_lift", in_between_move_hook=in_between_move_shoulder_lift_hook
+ )
+ calib["shoulder_lift"] = apply_offset(calib["shoulder_lift"], offset=-1024)
+
+ arm.write("Goal_Position", calib["wrist_flex"]["zero_pos"] - 1024, "wrist_flex")
+ time.sleep(1)
+ arm.write("Goal_Position", calib["shoulder_lift"]["zero_pos"] + 2048, "shoulder_lift")
+ arm.write("Goal_Position", calib["elbow_flex"]["zero_pos"] - 1024 - 400, "elbow_flex")
+ time.sleep(2)
+
+ calib_modes = []
+ for name in arm.motor_names:
+ if name == "gripper":
+ calib_modes.append(CalibrationMode.LINEAR.name)
+ else:
+ calib_modes.append(CalibrationMode.DEGREE.name)
+
+ calib_dict = {
+ "homing_offset": [calib[name]["homing_offset"] for name in arm.motor_names],
+ "drive_mode": [calib[name]["drive_mode"] for name in arm.motor_names],
+ "start_pos": [calib[name]["start_pos"] for name in arm.motor_names],
+ "end_pos": [calib[name]["end_pos"] for name in arm.motor_names],
+ "calib_mode": calib_modes,
+ "motor_names": arm.motor_names,
+ }
+
+ # Re-enable original accerlation
+ arm.write("Lock", 0)
+ arm.write("Acceleration", initial_acceleration)
+ time.sleep(1)
+
+ return calib_dict
+
+
+def run_arm_manual_calibration(arm: MotorsBus, robot_type: str, arm_name: str, arm_type: str):
+ """This function ensures that a neural network trained on data collected on a given robot
+ can work on another robot. For instance before calibration, setting a same goal position
+ for each motor of two different robots will get two very different positions. But after calibration,
+ the two robots will move to the same position.To this end, this function computes the homing offset
+ and the drive mode for each motor of a given robot.
+
+ Homing offset is used to shift the motor position to a ]-2048, +2048[ nominal range (when the motor uses 2048 steps
+ to complete a half a turn). This range is set around an arbitrary "zero position" corresponding to all motor positions
+ being 0. During the calibration process, you will need to manually move the robot to this "zero position".
+
+ Drive mode is used to invert the rotation direction of the motor. This is useful when some motors have been assembled
+ in the opposite orientation for some robots. During the calibration process, you will need to manually move the robot
+ to the "rotated position".
+
+ After calibration, the homing offsets and drive modes are stored in a cache.
+
+ Example of usage:
+ ```python
+ run_arm_calibration(arm, "so100", "left", "follower")
+ ```
+ """
+ if (arm.read("Torque_Enable") != TorqueMode.DISABLED.value).any():
+ raise ValueError("To run calibration, the torque must be disabled on all motors.")
+
+ print(f"\nRunning calibration of {robot_type} {arm_name} {arm_type}...")
+
+ print("\nMove arm to zero position")
+ print("See: " + URL_TEMPLATE.format(robot=robot_type, arm=arm_type, position="zero"))
+ input("Press Enter to continue...")
+
+ # We arbitrarily chose our zero target position to be a straight horizontal position with gripper upwards and closed.
+ # It is easy to identify and all motors are in a "quarter turn" position. Once calibration is done, this position will
+ # correspond to every motor angle being 0. If you set all 0 as Goal Position, the arm will move in this position.
+ zero_target_pos = convert_degrees_to_steps(ZERO_POSITION_DEGREE, arm.motor_models)
+
+ # Compute homing offset so that `present_position + homing_offset ~= target_position`.
+ zero_pos = arm.read("Present_Position")
+ homing_offset = zero_target_pos - zero_pos
+
+ # The rotated target position corresponds to a rotation of a quarter turn from the zero position.
+ # This allows to identify the rotation direction of each motor.
+ # For instance, if the motor rotates 90 degree, and its value is -90 after applying the homing offset, then we know its rotation direction
+ # is inverted. However, for the calibration being successful, we need everyone to follow the same target position.
+ # Sometimes, there is only one possible rotation direction. For instance, if the gripper is closed, there is only one direction which
+ # corresponds to opening the gripper. When the rotation direction is ambiguous, we arbitrarely rotate clockwise from the point of view
+ # of the previous motor in the kinetic chain.
+ print("\nMove arm to rotated target position")
+ print("See: " + URL_TEMPLATE.format(robot=robot_type, arm=arm_type, position="rotated"))
+ input("Press Enter to continue...")
+
+ rotated_target_pos = convert_degrees_to_steps(ROTATED_POSITION_DEGREE, arm.motor_models)
+
+ # Find drive mode by rotating each motor by a quarter of a turn.
+ # Drive mode indicates if the motor rotation direction should be inverted (=1) or not (=0).
+ rotated_pos = arm.read("Present_Position")
+ drive_mode = (rotated_pos < zero_pos).astype(np.int32)
+
+ # Re-compute homing offset to take into account drive mode
+ rotated_drived_pos = apply_drive_mode(rotated_pos, drive_mode)
+ homing_offset = rotated_target_pos - rotated_drived_pos
+
+ print("\nMove arm to rest position")
+ print("See: " + URL_TEMPLATE.format(robot=robot_type, arm=arm_type, position="rest"))
+ input("Press Enter to continue...")
+ print()
+
+ # Joints with rotational motions are expressed in degrees in nominal range of [-180, 180]
+ calib_modes = []
+ for name in arm.motor_names:
+ if name == "gripper":
+ calib_modes.append(CalibrationMode.LINEAR.name)
+ else:
+ calib_modes.append(CalibrationMode.DEGREE.name)
+
+ calib_dict = {
+ "homing_offset": homing_offset.tolist(),
+ "drive_mode": drive_mode.tolist(),
+ "start_pos": zero_pos.tolist(),
+ "end_pos": rotated_pos.tolist(),
+ "calib_mode": calib_modes,
+ "motor_names": arm.motor_names,
+ }
+ return calib_dict
diff --git a/lerobot/common/robot_devices/robots/manipulator.py b/lerobot/common/robot_devices/robots/manipulator.py
index 337519765..618105064 100644
--- a/lerobot/common/robot_devices/robots/manipulator.py
+++ b/lerobot/common/robot_devices/robots/manipulator.py
@@ -1,3 +1,9 @@
+"""Contains logic to instantiate a robot, read information from its motors and cameras,
+and send orders to its motors.
+"""
+# TODO(rcadene, aliberts): reorganize the codebase into one file per robot, with the associated
+# calibration procedure, to make it easy for people to add their own robot.
+
import json
import logging
import time
@@ -10,138 +16,10 @@
import torch
from lerobot.common.robot_devices.cameras.utils import Camera
-from lerobot.common.robot_devices.motors.dynamixel import (
- CalibrationMode,
- TorqueMode,
- convert_degrees_to_steps,
-)
from lerobot.common.robot_devices.motors.utils import MotorsBus
from lerobot.common.robot_devices.robots.utils import get_arm_id
from lerobot.common.robot_devices.utils import RobotDeviceAlreadyConnectedError, RobotDeviceNotConnectedError
-########################################################################
-# Calibration logic
-########################################################################
-
-URL_TEMPLATE = (
- "https://raw.githubusercontent.com/huggingface/lerobot/main/media/{robot}/{arm}_{position}.webp"
-)
-
-# The following positions are provided in nominal degree range ]-180, +180[
-# For more info on these constants, see comments in the code where they get used.
-ZERO_POSITION_DEGREE = 0
-ROTATED_POSITION_DEGREE = 90
-
-
-def assert_drive_mode(drive_mode):
- # `drive_mode` is in [0,1] with 0 means original rotation direction for the motor, and 1 means inverted.
- if not np.all(np.isin(drive_mode, [0, 1])):
- raise ValueError(f"`drive_mode` contains values other than 0 or 1: ({drive_mode})")
-
-
-def apply_drive_mode(position, drive_mode):
- assert_drive_mode(drive_mode)
- # Convert `drive_mode` from [0, 1] with 0 indicates original rotation direction and 1 inverted,
- # to [-1, 1] with 1 indicates original rotation direction and -1 inverted.
- signed_drive_mode = -(drive_mode * 2 - 1)
- position *= signed_drive_mode
- return position
-
-
-def compute_nearest_rounded_position(position, models):
- delta_turn = convert_degrees_to_steps(ROTATED_POSITION_DEGREE, models)
- nearest_pos = np.round(position.astype(float) / delta_turn) * delta_turn
- return nearest_pos.astype(position.dtype)
-
-
-def run_arm_calibration(arm: MotorsBus, robot_type: str, arm_name: str, arm_type: str):
- """This function ensures that a neural network trained on data collected on a given robot
- can work on another robot. For instance before calibration, setting a same goal position
- for each motor of two different robots will get two very different positions. But after calibration,
- the two robots will move to the same position.To this end, this function computes the homing offset
- and the drive mode for each motor of a given robot.
-
- Homing offset is used to shift the motor position to a ]-2048, +2048[ nominal range (when the motor uses 2048 steps
- to complete a half a turn). This range is set around an arbitrary "zero position" corresponding to all motor positions
- being 0. During the calibration process, you will need to manually move the robot to this "zero position".
-
- Drive mode is used to invert the rotation direction of the motor. This is useful when some motors have been assembled
- in the opposite orientation for some robots. During the calibration process, you will need to manually move the robot
- to the "rotated position".
-
- After calibration, the homing offsets and drive modes are stored in a cache.
-
- Example of usage:
- ```python
- run_arm_calibration(arm, "koch", "left", "follower")
- ```
- """
- if (arm.read("Torque_Enable") != TorqueMode.DISABLED.value).any():
- raise ValueError("To run calibration, the torque must be disabled on all motors.")
-
- print(f"\nRunning calibration of {robot_type} {arm_name} {arm_type}...")
-
- print("\nMove arm to zero position")
- print("See: " + URL_TEMPLATE.format(robot=robot_type, arm=arm_type, position="zero"))
- input("Press Enter to continue...")
-
- # We arbitrarily chose our zero target position to be a straight horizontal position with gripper upwards and closed.
- # It is easy to identify and all motors are in a "quarter turn" position. Once calibration is done, this position will
- # correspond to every motor angle being 0. If you set all 0 as Goal Position, the arm will move in this position.
- zero_target_pos = convert_degrees_to_steps(ZERO_POSITION_DEGREE, arm.motor_models)
-
- # Compute homing offset so that `present_position + homing_offset ~= target_position`.
- zero_pos = arm.read("Present_Position")
- zero_nearest_pos = compute_nearest_rounded_position(zero_pos, arm.motor_models)
- homing_offset = zero_target_pos - zero_nearest_pos
-
- # The rotated target position corresponds to a rotation of a quarter turn from the zero position.
- # This allows to identify the rotation direction of each motor.
- # For instance, if the motor rotates 90 degree, and its value is -90 after applying the homing offset, then we know its rotation direction
- # is inverted. However, for the calibration being successful, we need everyone to follow the same target position.
- # Sometimes, there is only one possible rotation direction. For instance, if the gripper is closed, there is only one direction which
- # corresponds to opening the gripper. When the rotation direction is ambiguous, we arbitrarely rotate clockwise from the point of view
- # of the previous motor in the kinetic chain.
- print("\nMove arm to rotated target position")
- print("See: " + URL_TEMPLATE.format(robot=robot_type, arm=arm_type, position="rotated"))
- input("Press Enter to continue...")
-
- rotated_target_pos = convert_degrees_to_steps(ROTATED_POSITION_DEGREE, arm.motor_models)
-
- # Find drive mode by rotating each motor by a quarter of a turn.
- # Drive mode indicates if the motor rotation direction should be inverted (=1) or not (=0).
- rotated_pos = arm.read("Present_Position")
- drive_mode = (rotated_pos < zero_pos).astype(np.int32)
-
- # Re-compute homing offset to take into account drive mode
- rotated_drived_pos = apply_drive_mode(rotated_pos, drive_mode)
- rotated_nearest_pos = compute_nearest_rounded_position(rotated_drived_pos, arm.motor_models)
- homing_offset = rotated_target_pos - rotated_nearest_pos
-
- print("\nMove arm to rest position")
- print("See: " + URL_TEMPLATE.format(robot=robot_type, arm=arm_type, position="rest"))
- input("Press Enter to continue...")
- print()
-
- # Joints with rotational motions are expressed in degrees in nominal range of [-180, 180]
- calib_mode = [CalibrationMode.DEGREE.name] * len(arm.motor_names)
-
- # TODO(rcadene): make type of joints (DEGREE or LINEAR) configurable from yaml?
- if robot_type == "aloha" and "gripper" in arm.motor_names:
- # Joints with linear motions (like gripper of Aloha) are experessed in nominal range of [0, 100]
- calib_idx = arm.motor_names.index("gripper")
- calib_mode[calib_idx] = CalibrationMode.LINEAR.name
-
- calib_data = {
- "homing_offset": homing_offset.tolist(),
- "drive_mode": drive_mode.tolist(),
- "start_pos": zero_pos.tolist(),
- "end_pos": rotated_pos.tolist(),
- "calib_mode": calib_mode,
- "motor_names": arm.motor_names,
- }
- return calib_data
-
def ensure_safe_goal_position(
goal_pos: torch.Tensor, present_pos: torch.Tensor, max_relative_target: float | list[float]
@@ -163,11 +41,6 @@ def ensure_safe_goal_position(
return safe_goal_pos
-########################################################################
-# Manipulator robot
-########################################################################
-
-
@dataclass
class ManipulatorRobotConfig:
"""
@@ -178,7 +51,7 @@ class ManipulatorRobotConfig:
"""
# Define all components of the robot
- robot_type: str | None = None
+ robot_type: str = "koch"
leader_arms: dict[str, MotorsBus] = field(default_factory=lambda: {})
follower_arms: dict[str, MotorsBus] = field(default_factory=lambda: {})
cameras: dict[str, Camera] = field(default_factory=lambda: {})
@@ -207,6 +80,10 @@ def __setattr__(self, prop: str, val):
)
super().__setattr__(prop, val)
+ def __post_init__(self):
+ if self.robot_type not in ["koch", "koch_bimanual", "aloha", "so100", "moss"]:
+ raise ValueError(f"Provided robot type ({self.robot_type}) is not supported.")
+
class ManipulatorRobot:
# TODO(rcadene): Implement force feedback
@@ -349,6 +226,61 @@ def __init__(
self.is_connected = False
self.logs = {}
+ def get_motor_names(self, arm: dict[str, MotorsBus]) -> list:
+ return [f"{arm}_{motor}" for arm, bus in arm.items() for motor in bus.motors]
+
+ @property
+ def camera_features(self) -> dict:
+ cam_ft = {}
+ for cam_key, cam in self.cameras.items():
+ key = f"observation.images.{cam_key}"
+ cam_ft[key] = {
+ "shape": (cam.height, cam.width, cam.channels),
+ "names": ["height", "width", "channels"],
+ "info": None,
+ }
+ return cam_ft
+
+ @property
+ def motor_features(self) -> dict:
+ action_names = self.get_motor_names(self.leader_arms)
+ state_names = self.get_motor_names(self.leader_arms)
+ return {
+ "action": {
+ "dtype": "float32",
+ "shape": (len(action_names),),
+ "names": action_names,
+ },
+ "observation.state": {
+ "dtype": "float32",
+ "shape": (len(state_names),),
+ "names": state_names,
+ },
+ }
+
+ @property
+ def features(self):
+ return {**self.motor_features, **self.camera_features}
+
+ @property
+ def has_camera(self):
+ return len(self.cameras) > 0
+
+ @property
+ def num_cameras(self):
+ return len(self.cameras)
+
+ @property
+ def available_arms(self):
+ available_arms = []
+ for name in self.follower_arms:
+ arm_id = get_arm_id(name, "follower")
+ available_arms.append(arm_id)
+ for name in self.leader_arms:
+ arm_id = get_arm_id(name, "leader")
+ available_arms.append(arm_id)
+ return available_arms
+
def connect(self):
if self.is_connected:
raise RobotDeviceAlreadyConnectedError(
@@ -364,9 +296,15 @@ def connect(self):
for name in self.follower_arms:
print(f"Connecting {name} follower arm.")
self.follower_arms[name].connect()
+ for name in self.leader_arms:
print(f"Connecting {name} leader arm.")
self.leader_arms[name].connect()
+ if self.robot_type in ["koch", "koch_bimanual", "aloha"]:
+ from lerobot.common.robot_devices.motors.dynamixel import TorqueMode
+ elif self.robot_type in ["so100", "moss"]:
+ from lerobot.common.robot_devices.motors.feetech import TorqueMode
+
# We assume that at connection time, arms are in a rest position, and torque can
# be safely disabled to run calibration and/or set robot preset configurations.
for name in self.follower_arms:
@@ -377,12 +315,12 @@ def connect(self):
self.activate_calibration()
# Set robot preset (e.g. torque in leader gripper for Koch v1.1)
- if self.robot_type == "koch":
+ if self.robot_type in ["koch", "koch_bimanual"]:
self.set_koch_robot_preset()
elif self.robot_type == "aloha":
self.set_aloha_robot_preset()
- else:
- warnings.warn(f"No preset found for robot type: {self.robot_type}", stacklevel=1)
+ elif self.robot_type in ["so100", "moss"]:
+ self.set_so100_robot_preset()
# Enable torque on all motors of the follower arms
for name in self.follower_arms:
@@ -390,12 +328,22 @@ def connect(self):
self.follower_arms[name].write("Torque_Enable", 1)
if self.config.gripper_open_degree is not None:
+ if self.robot_type not in ["koch", "koch_bimanual"]:
+ raise NotImplementedError(
+ f"{self.robot_type} does not support position AND current control in the handle, which is require to set the gripper open."
+ )
# Set the leader arm in torque mode with the gripper motor set to an angle. This makes it possible
# to squeeze the gripper and have it spring back to an open position on its own.
for name in self.leader_arms:
self.leader_arms[name].write("Torque_Enable", 1, "gripper")
self.leader_arms[name].write("Goal_Position", self.config.gripper_open_degree, "gripper")
+ # Check both arms can be read
+ for name in self.follower_arms:
+ self.follower_arms[name].read("Present_Position")
+ for name in self.leader_arms:
+ self.leader_arms[name].read("Present_Position")
+
# Connect the cameras
for name in self.cameras:
self.cameras[name].connect()
@@ -416,8 +364,20 @@ def load_or_run_calibration_(name, arm, arm_type):
with open(arm_calib_path) as f:
calibration = json.load(f)
else:
+ # TODO(rcadene): display a warning in __init__ if calibration file not available
print(f"Missing calibration file '{arm_calib_path}'")
- calibration = run_arm_calibration(arm, self.robot_type, name, arm_type)
+
+ if self.robot_type in ["koch", "koch_bimanual", "aloha"]:
+ from lerobot.common.robot_devices.robots.dynamixel_calibration import run_arm_calibration
+
+ calibration = run_arm_calibration(arm, self.robot_type, name, arm_type)
+
+ elif self.robot_type in ["so100", "moss"]:
+ from lerobot.common.robot_devices.robots.feetech_calibration import (
+ run_arm_manual_calibration,
+ )
+
+ calibration = run_arm_manual_calibration(arm, self.robot_type, name, arm_type)
print(f"Calibration is done! Saving calibration file '{arm_calib_path}'")
arm_calib_path.parent.mkdir(parents=True, exist_ok=True)
@@ -435,6 +395,8 @@ def load_or_run_calibration_(name, arm, arm_type):
def set_koch_robot_preset(self):
def set_operating_mode_(arm):
+ from lerobot.common.robot_devices.motors.dynamixel import TorqueMode
+
if (arm.read("Torque_Enable") != TorqueMode.DISABLED.value).any():
raise ValueError("To run set robot preset, the torque must be disabled on all motors.")
@@ -522,6 +484,23 @@ def set_shadow_(arm):
stacklevel=1,
)
+ def set_so100_robot_preset(self):
+ for name in self.follower_arms:
+ # Mode=0 for Position Control
+ self.follower_arms[name].write("Mode", 0)
+ # Set P_Coefficient to lower value to avoid shakiness (Default is 32)
+ self.follower_arms[name].write("P_Coefficient", 16)
+ # Set I_Coefficient and D_Coefficient to default value 0 and 32
+ self.follower_arms[name].write("I_Coefficient", 0)
+ self.follower_arms[name].write("D_Coefficient", 32)
+ # Close the write lock so that Maximum_Acceleration gets written to EPROM address,
+ # which is mandatory for Maximum_Acceleration to take effect after rebooting.
+ self.follower_arms[name].write("Lock", 0)
+ # Set Maximum_Acceleration to 254 to speedup acceleration and deceleration of
+ # the motors. Note: this configuration is not in the official STS3215 Memory Table
+ self.follower_arms[name].write("Maximum_Acceleration", 254)
+ self.follower_arms[name].write("Acceleration", 254)
+
def teleop_step(
self, record_data=False
) -> None | tuple[dict[str, torch.Tensor], dict[str, torch.Tensor]]:
@@ -681,6 +660,10 @@ def send_action(self, action: torch.Tensor) -> torch.Tensor:
return torch.cat(action_sent)
+ def print_logs(self):
+ pass
+ # TODO(aliberts): move robot-specific logs logic here
+
def disconnect(self):
if not self.is_connected:
raise RobotDeviceNotConnectedError(
diff --git a/lerobot/common/robot_devices/robots/stretch.py b/lerobot/common/robot_devices/robots/stretch.py
new file mode 100644
index 000000000..ff86b6d80
--- /dev/null
+++ b/lerobot/common/robot_devices/robots/stretch.py
@@ -0,0 +1,216 @@
+#!/usr/bin/env python
+
+# Copyright 2024 The HuggingFace Inc. team. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import time
+from dataclasses import dataclass, field, replace
+
+import torch
+from stretch_body.gamepad_teleop import GamePadTeleop
+from stretch_body.robot import Robot as StretchAPI
+from stretch_body.robot_params import RobotParams
+
+from lerobot.common.robot_devices.cameras.utils import Camera
+
+
+@dataclass
+class StretchRobotConfig:
+ robot_type: str | None = "stretch"
+ cameras: dict[str, Camera] = field(default_factory=lambda: {})
+ # TODO(aliberts): add feature with max_relative target
+ # TODO(aliberts): add comment on max_relative target
+ max_relative_target: list[float] | float | None = None
+
+
+class StretchRobot(StretchAPI):
+ """Wrapper of stretch_body.robot.Robot"""
+
+ def __init__(self, config: StretchRobotConfig | None = None, **kwargs):
+ super().__init__()
+ if config is None:
+ config = StretchRobotConfig()
+ # Overwrite config arguments using kwargs
+ self.config = replace(config, **kwargs)
+
+ self.robot_type = self.config.robot_type
+ self.cameras = self.config.cameras
+ self.is_connected = False
+ self.teleop = None
+ self.logs = {}
+
+ # TODO(aliberts): test this
+ RobotParams.set_logging_level("WARNING")
+ RobotParams.set_logging_formatter("brief_console_formatter")
+
+ self.state_keys = None
+ self.action_keys = None
+
+ def connect(self) -> None:
+ self.is_connected = self.startup()
+ if not self.is_connected:
+ print("Another process is already using Stretch. Try running 'stretch_free_robot_process.py'")
+ raise ConnectionError()
+
+ for name in self.cameras:
+ self.cameras[name].connect()
+ self.is_connected = self.is_connected and self.cameras[name].is_connected
+
+ if not self.is_connected:
+ print("Could not connect to the cameras, check that all cameras are plugged-in.")
+ raise ConnectionError()
+
+ self.run_calibration()
+
+ def run_calibration(self) -> None:
+ if not self.is_homed():
+ self.home()
+
+ def teleop_step(
+ self, record_data=False
+ ) -> None | tuple[dict[str, torch.Tensor], dict[str, torch.Tensor]]:
+ # TODO(aliberts): return ndarrays instead of torch.Tensors
+ if not self.is_connected:
+ raise ConnectionError()
+
+ if self.teleop is None:
+ self.teleop = GamePadTeleop(robot_instance=False)
+ self.teleop.startup(robot=self)
+
+ before_read_t = time.perf_counter()
+ state = self.get_state()
+ action = self.teleop.gamepad_controller.get_state()
+ self.logs["read_pos_dt_s"] = time.perf_counter() - before_read_t
+
+ before_write_t = time.perf_counter()
+ self.teleop.do_motion(robot=self)
+ self.push_command()
+ self.logs["write_pos_dt_s"] = time.perf_counter() - before_write_t
+
+ if self.state_keys is None:
+ self.state_keys = list(state)
+
+ if not record_data:
+ return
+
+ state = torch.as_tensor(list(state.values()))
+ action = torch.as_tensor(list(action.values()))
+
+ # Capture images from cameras
+ images = {}
+ for name in self.cameras:
+ before_camread_t = time.perf_counter()
+ images[name] = self.cameras[name].async_read()
+ images[name] = torch.from_numpy(images[name])
+ self.logs[f"read_camera_{name}_dt_s"] = self.cameras[name].logs["delta_timestamp_s"]
+ self.logs[f"async_read_camera_{name}_dt_s"] = time.perf_counter() - before_camread_t
+
+ # Populate output dictionnaries
+ obs_dict, action_dict = {}, {}
+ obs_dict["observation.state"] = state
+ action_dict["action"] = action
+ for name in self.cameras:
+ obs_dict[f"observation.images.{name}"] = images[name]
+
+ return obs_dict, action_dict
+
+ def get_state(self) -> dict:
+ status = self.get_status()
+ return {
+ "head_pan.pos": status["head"]["head_pan"]["pos"],
+ "head_tilt.pos": status["head"]["head_tilt"]["pos"],
+ "lift.pos": status["lift"]["pos"],
+ "arm.pos": status["arm"]["pos"],
+ "wrist_pitch.pos": status["end_of_arm"]["wrist_pitch"]["pos"],
+ "wrist_roll.pos": status["end_of_arm"]["wrist_roll"]["pos"],
+ "wrist_yaw.pos": status["end_of_arm"]["wrist_yaw"]["pos"],
+ "gripper.pos": status["end_of_arm"]["stretch_gripper"]["pos"],
+ "base_x.vel": status["base"]["x_vel"],
+ "base_y.vel": status["base"]["y_vel"],
+ "base_theta.vel": status["base"]["theta_vel"],
+ }
+
+ def capture_observation(self) -> dict:
+ # TODO(aliberts): return ndarrays instead of torch.Tensors
+ before_read_t = time.perf_counter()
+ state = self.get_state()
+ self.logs["read_pos_dt_s"] = time.perf_counter() - before_read_t
+
+ if self.state_keys is None:
+ self.state_keys = list(state)
+
+ state = torch.as_tensor(list(state.values()))
+
+ # Capture images from cameras
+ images = {}
+ for name in self.cameras:
+ before_camread_t = time.perf_counter()
+ images[name] = self.cameras[name].async_read()
+ images[name] = torch.from_numpy(images[name])
+ self.logs[f"read_camera_{name}_dt_s"] = self.cameras[name].logs["delta_timestamp_s"]
+ self.logs[f"async_read_camera_{name}_dt_s"] = time.perf_counter() - before_camread_t
+
+ # Populate output dictionnaries
+ obs_dict = {}
+ obs_dict["observation.state"] = state
+ for name in self.cameras:
+ obs_dict[f"observation.images.{name}"] = images[name]
+
+ return obs_dict
+
+ def send_action(self, action: torch.Tensor) -> torch.Tensor:
+ # TODO(aliberts): return ndarrays instead of torch.Tensors
+ if not self.is_connected:
+ raise ConnectionError()
+
+ if self.teleop is None:
+ self.teleop = GamePadTeleop(robot_instance=False)
+ self.teleop.startup(robot=self)
+
+ if self.action_keys is None:
+ dummy_action = self.teleop.gamepad_controller.get_state()
+ self.action_keys = list(dummy_action.keys())
+
+ action_dict = dict(zip(self.action_keys, action.tolist(), strict=True))
+
+ before_write_t = time.perf_counter()
+ self.teleop.do_motion(state=action_dict, robot=self)
+ self.push_command()
+ self.logs["write_pos_dt_s"] = time.perf_counter() - before_write_t
+
+ # TODO(aliberts): return action_sent when motion is limited
+ return action
+
+ def print_logs(self) -> None:
+ pass
+ # TODO(aliberts): move robot-specific logs logic here
+
+ def teleop_safety_stop(self) -> None:
+ if self.teleop is not None:
+ self.teleop._safety_stop(robot=self)
+
+ def disconnect(self) -> None:
+ self.stop()
+ if self.teleop is not None:
+ self.teleop.gamepad_controller.stop()
+ self.teleop.stop()
+
+ if len(self.cameras) > 0:
+ for cam in self.cameras.values():
+ cam.disconnect()
+
+ self.is_connected = False
+
+ def __del__(self):
+ self.disconnect()
diff --git a/lerobot/common/robot_devices/robots/utils.py b/lerobot/common/robot_devices/robots/utils.py
index 122155f78..a40db1312 100644
--- a/lerobot/common/robot_devices/robots/utils.py
+++ b/lerobot/common/robot_devices/robots/utils.py
@@ -9,8 +9,13 @@ def get_arm_id(name, arm_type):
class Robot(Protocol):
- def init_teleop(self): ...
+ # TODO(rcadene, aliberts): Add unit test checking the protocol is implemented in the corresponding classes
+ robot_type: str
+ features: dict
+
+ def connect(self): ...
def run_calibration(self): ...
def teleop_step(self, record_data=False): ...
def capture_observation(self): ...
def send_action(self, action): ...
+ def disconnect(self): ...
diff --git a/lerobot/common/robot_devices/utils.py b/lerobot/common/robot_devices/utils.py
index bcbeb8e02..19bb637e5 100644
--- a/lerobot/common/robot_devices/utils.py
+++ b/lerobot/common/robot_devices/utils.py
@@ -16,6 +16,20 @@ def busy_wait(seconds):
time.sleep(seconds)
+def safe_disconnect(func):
+ # TODO(aliberts): Allow to pass custom exceptions
+ # (e.g. ThreadServiceExit, KeyboardInterrupt, SystemExit, UnpluggedError, DynamixelCommError)
+ def wrapper(robot, *args, **kwargs):
+ try:
+ return func(robot, *args, **kwargs)
+ except Exception as e:
+ if robot.is_connected:
+ robot.disconnect()
+ raise e
+
+ return wrapper
+
+
class RobotDeviceNotConnectedError(Exception):
"""Exception raised when the robot device is not connected."""
diff --git a/lerobot/common/utils/utils.py b/lerobot/common/utils/utils.py
index 1aa0bc2d4..8e6b160af 100644
--- a/lerobot/common/utils/utils.py
+++ b/lerobot/common/utils/utils.py
@@ -16,16 +16,23 @@
import logging
import os
import os.path as osp
+import platform
import random
from contextlib import contextmanager
from datetime import datetime, timezone
from pathlib import Path
-from typing import Any, Generator
+from typing import Any, Callable, Generator
import hydra
import numpy as np
import torch
-from omegaconf import DictConfig
+from omegaconf import DictConfig, OmegaConf
+
+
+def none_or_int(value):
+ if value == "None":
+ return None
+ return int(value)
def inside_slurm():
@@ -34,12 +41,12 @@ def inside_slurm():
return "SLURM_JOB_ID" in os.environ
-def get_safe_torch_device(cfg_device: str, log: bool = False) -> torch.device:
+def get_safe_torch_device(cfg_device: str, log: bool = False, accelerator: Callable = None) -> torch.device:
"""Given a string, return a torch.device with checks on whether the device is available."""
match cfg_device:
case "cuda":
assert torch.cuda.is_available()
- device = torch.device("cuda")
+ device = accelerator.device if accelerator else torch.device("cuda")
case "mps":
assert torch.backends.mps.is_available()
device = torch.device("mps")
@@ -80,13 +87,17 @@ def set_global_random_state(random_state_dict: dict[str, Any]):
torch.cuda.random.set_rng_state(random_state_dict["torch_cuda_random_state"])
-def set_global_seed(seed):
+def set_global_seed(seed, accelerator: Callable = None):
"""Set seed for reproducibility."""
random.seed(seed)
np.random.seed(seed)
torch.manual_seed(seed)
if torch.cuda.is_available():
torch.cuda.manual_seed_all(seed)
+ if accelerator:
+ from accelerate.utils import set_seed
+
+ set_seed(seed)
@contextmanager
@@ -108,7 +119,7 @@ def seeded_context(seed: int) -> Generator[None, None, None]:
set_global_random_state(random_state_dict)
-def init_logging():
+def init_logging(accelerator: Callable = None):
def custom_format(record):
dt = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
fnameline = f"{record.pathname}:{record.lineno}"
@@ -126,6 +137,11 @@ def custom_format(record):
console_handler.setFormatter(formatter)
logging.getLogger().addHandler(console_handler)
+ if accelerator is not None and not accelerator.is_main_process:
+ # Disable duplicate logging on non-main processes
+ logging.info(f"Setting logging level on non-main process {accelerator.process_index} to WARNING.")
+ logging.getLogger().setLevel(logging.WARNING)
+
def format_big_number(num, precision=0):
suffixes = ["", "K", "M", "B", "T", "Q"]
@@ -183,3 +199,54 @@ def print_cuda_memory_usage():
def capture_timestamp_utc():
return datetime.now(timezone.utc)
+
+
+def say(text, blocking=False):
+ # Check if mac, linux, or windows.
+ if platform.system() == "Darwin":
+ cmd = f'say "{text}"'
+ if not blocking:
+ cmd += " &"
+ elif platform.system() == "Linux":
+ cmd = f'spd-say "{text}"'
+ if blocking:
+ cmd += " --wait"
+ elif platform.system() == "Windows":
+ # TODO(rcadene): Make blocking option work for Windows
+ cmd = (
+ 'PowerShell -Command "Add-Type -AssemblyName System.Speech; '
+ f"(New-Object System.Speech.Synthesis.SpeechSynthesizer).Speak('{text}')\""
+ )
+
+ os.system(cmd)
+
+
+def log_say(text, play_sounds, blocking=False):
+ logging.info(text)
+
+ if play_sounds:
+ say(text, blocking)
+
+
+def is_launched_with_accelerate() -> bool:
+ return "ACCELERATE_MIXED_PRECISION" in os.environ
+
+
+def get_accelerate_config(accelerator: Callable = None) -> dict[str, Any]:
+ config = {}
+ if not accelerator:
+ return config
+ config["num_processes"] = accelerator.num_processes
+ config["device"] = str(accelerator.device)
+ config["distributed_type"] = str(accelerator.distributed_type)
+ config["mixed_precision"] = accelerator.mixed_precision
+ config["gradient_accumulation_steps"] = accelerator.gradient_accumulation_steps
+
+ return config
+
+
+def update_omegaconf(cfg: DictConfig, config_name: str, config: dict[str, Any]) -> DictConfig:
+ cfg_dict = OmegaConf.to_container(cfg, resolve=True)
+ cfg_dict[config_name] = config
+ cfg = OmegaConf.create(cfg_dict)
+ return cfg
diff --git a/lerobot/configs/default.yaml b/lerobot/configs/default.yaml
index a3ff1d41b..ba42a10c1 100644
--- a/lerobot/configs/default.yaml
+++ b/lerobot/configs/default.yaml
@@ -37,7 +37,7 @@ training:
num_workers: 4
batch_size: ???
-
+ lr_scheduler:
eval_freq: ???
log_freq: 200
save_checkpoint: true
@@ -121,6 +121,8 @@ eval:
batch_size: 1
# `use_async_envs` specifies whether to use asynchronous environments (multiprocessing).
use_async_envs: false
+ # maximum number of episodes to render into videos.
+ max_episodes_rendered: 10
wandb:
enable: false
diff --git a/lerobot/configs/env/aloha_real.yaml b/lerobot/configs/env/aloha_real.yaml
new file mode 100644
index 000000000..57af4be20
--- /dev/null
+++ b/lerobot/configs/env/aloha_real.yaml
@@ -0,0 +1,10 @@
+# @package _global_
+
+fps: 30
+
+env:
+ name: real_world
+ task: null
+ state_dim: 18
+ action_dim: 18
+ fps: ${fps}
diff --git a/lerobot/configs/env/moss_real.yaml b/lerobot/configs/env/moss_real.yaml
new file mode 100644
index 000000000..8e65d72f4
--- /dev/null
+++ b/lerobot/configs/env/moss_real.yaml
@@ -0,0 +1,10 @@
+# @package _global_
+
+fps: 30
+
+env:
+ name: real_world
+ task: null
+ state_dim: 6
+ action_dim: 6
+ fps: ${fps}
diff --git a/lerobot/configs/env/so100_real.yaml b/lerobot/configs/env/so100_real.yaml
new file mode 100644
index 000000000..8e65d72f4
--- /dev/null
+++ b/lerobot/configs/env/so100_real.yaml
@@ -0,0 +1,10 @@
+# @package _global_
+
+fps: 30
+
+env:
+ name: real_world
+ task: null
+ state_dim: 6
+ action_dim: 6
+ fps: ${fps}
diff --git a/lerobot/configs/policy/act_real.yaml b/lerobot/configs/policy/act_aloha_real.yaml
similarity index 76%
rename from lerobot/configs/policy/act_real.yaml
rename to lerobot/configs/policy/act_aloha_real.yaml
index 058104f4d..7c8094da1 100644
--- a/lerobot/configs/policy/act_real.yaml
+++ b/lerobot/configs/policy/act_aloha_real.yaml
@@ -1,16 +1,22 @@
# @package _global_
-# Use `act_real.yaml` to train on real-world Aloha/Aloha2 datasets.
-# Compared to `act.yaml`, it contains 4 cameras (i.e. cam_right_wrist, cam_left_wrist, images,
-# cam_low) instead of 1 camera (i.e. top). Also, `training.eval_freq` is set to -1. This config is used
-# to evaluate checkpoints at a certain frequency of training steps. When it is set to -1, it deactivates evaluation.
-# This is because real-world evaluation is done through [dora-lerobot](https://github.com/dora-rs/dora-lerobot).
-# Look at its README for more information on how to evaluate a checkpoint in the real-world.
+# Use `act_aloha_real.yaml` to train on real-world datasets collected on Aloha or Aloha-2 robots.
+# Compared to `act.yaml`, it contains 4 cameras (i.e. cam_right_wrist, cam_left_wrist, cam_high, cam_low) instead of 1 camera (i.e. top).
+# Also, `training.eval_freq` is set to -1. This config is used to evaluate checkpoints at a certain frequency of training steps.
+# When it is set to -1, it deactivates evaluation. This is because real-world evaluation is done through our `control_robot.py` script.
+# Look at the documentation in header of `control_robot.py` for more information on how to collect data , train and evaluate a policy.
#
-# Example of usage for training:
+# Example of usage for training and inference with `control_robot.py`:
# ```bash
# python lerobot/scripts/train.py \
-# policy=act_real \
+# policy=act_aloha_real \
+# env=aloha_real
+# ```
+#
+# Example of usage for training and inference with [Dora-rs](https://github.com/dora-rs/dora-lerobot):
+# ```bash
+# python lerobot/scripts/train.py \
+# policy=act_aloha_real \
# env=dora_aloha_real
# ```
@@ -36,10 +42,11 @@ override_dataset_stats:
std: [[[0.229]], [[0.224]], [[0.225]]] # (c,1,1)
training:
- offline_steps: 100000
+ offline_steps: 80000
online_steps: 0
eval_freq: -1
- save_freq: 20000
+ save_freq: 10000
+ log_freq: 100
save_checkpoint: true
batch_size: 8
@@ -62,7 +69,7 @@ policy:
# Input / output structure.
n_obs_steps: 1
- chunk_size: 100 # chunk_size
+ chunk_size: 100
n_action_steps: 100
input_shapes:
diff --git a/lerobot/configs/policy/act_koch_real.yaml b/lerobot/configs/policy/act_koch_real.yaml
index fd4bf3b59..6ddebab14 100644
--- a/lerobot/configs/policy/act_koch_real.yaml
+++ b/lerobot/configs/policy/act_koch_real.yaml
@@ -95,7 +95,7 @@ policy:
n_vae_encoder_layers: 4
# Inference.
- temporal_ensemble_momentum: null
+ temporal_ensemble_coeff: null
# Training and loss computation.
dropout: 0.1
diff --git a/lerobot/configs/policy/act_real_no_state.yaml b/lerobot/configs/policy/act_moss_real.yaml
similarity index 57%
rename from lerobot/configs/policy/act_real_no_state.yaml
rename to lerobot/configs/policy/act_moss_real.yaml
index 082610503..d996c3597 100644
--- a/lerobot/configs/policy/act_real_no_state.yaml
+++ b/lerobot/configs/policy/act_moss_real.yaml
@@ -1,43 +1,37 @@
# @package _global_
-# Use `act_real_no_state.yaml` to train on real-world Aloha/Aloha2 datasets when cameras are moving (e.g. wrist cameras)
-# Compared to `act_real.yaml`, it is camera only and does not use the state as input which is vector of robot joint positions.
-# We validated experimentaly that not using state reaches better success rate. Our hypothesis is that `act_real.yaml` might
-# overfits to the state, because the images are more complex to learn from since they are moving.
+# Use `act_koch_real.yaml` to train on real-world datasets collected on Alexander Koch's robots.
+# Compared to `act.yaml`, it contains 2 cameras (i.e. laptop, phone) instead of 1 camera (i.e. top).
+# Also, `training.eval_freq` is set to -1. This config is used to evaluate checkpoints at a certain frequency of training steps.
+# When it is set to -1, it deactivates evaluation. This is because real-world evaluation is done through our `control_robot.py` script.
+# Look at the documentation in header of `control_robot.py` for more information on how to collect data , train and evaluate a policy.
#
# Example of usage for training:
# ```bash
# python lerobot/scripts/train.py \
-# policy=act_real_no_state \
-# env=dora_aloha_real
+# policy=act_koch_real \
+# env=koch_real
# ```
seed: 1000
-dataset_repo_id: lerobot/aloha_static_vinh_cup
+dataset_repo_id: lerobot/moss_pick_place_lego
override_dataset_stats:
- observation.images.cam_right_wrist:
+ observation.images.laptop:
# stats from imagenet, since we use a pretrained vision model
mean: [[[0.485]], [[0.456]], [[0.406]]] # (c,1,1)
std: [[[0.229]], [[0.224]], [[0.225]]] # (c,1,1)
- observation.images.cam_left_wrist:
- # stats from imagenet, since we use a pretrained vision model
- mean: [[[0.485]], [[0.456]], [[0.406]]] # (c,1,1)
- std: [[[0.229]], [[0.224]], [[0.225]]] # (c,1,1)
- observation.images.cam_high:
- # stats from imagenet, since we use a pretrained vision model
- mean: [[[0.485]], [[0.456]], [[0.406]]] # (c,1,1)
- std: [[[0.229]], [[0.224]], [[0.225]]] # (c,1,1)
- observation.images.cam_low:
+ observation.images.phone:
# stats from imagenet, since we use a pretrained vision model
mean: [[[0.485]], [[0.456]], [[0.406]]] # (c,1,1)
std: [[[0.229]], [[0.224]], [[0.225]]] # (c,1,1)
training:
- offline_steps: 100000
+ offline_steps: 80000
online_steps: 0
eval_freq: -1
- save_freq: 20000
+ save_freq: 10000
+ log_freq: 100
save_checkpoint: true
batch_size: 8
@@ -60,24 +54,22 @@ policy:
# Input / output structure.
n_obs_steps: 1
- chunk_size: 100 # chunk_size
+ chunk_size: 100
n_action_steps: 100
input_shapes:
# TODO(rcadene, alexander-soare): add variables for height and width from the dataset/env?
- observation.images.cam_right_wrist: [3, 480, 640]
- observation.images.cam_left_wrist: [3, 480, 640]
- observation.images.cam_high: [3, 480, 640]
- observation.images.cam_low: [3, 480, 640]
+ observation.images.laptop: [3, 480, 640]
+ observation.images.phone: [3, 480, 640]
+ observation.state: ["${env.state_dim}"]
output_shapes:
action: ["${env.action_dim}"]
# Normalization / Unnormalization
input_normalization_modes:
- observation.images.cam_right_wrist: mean_std
- observation.images.cam_left_wrist: mean_std
- observation.images.cam_high: mean_std
- observation.images.cam_low: mean_std
+ observation.images.laptop: mean_std
+ observation.images.phone: mean_std
+ observation.state: mean_std
output_normalization_modes:
action: mean_std
diff --git a/lerobot/configs/policy/act_pusht.yaml b/lerobot/configs/policy/act_pusht.yaml
new file mode 100644
index 000000000..4963e11c0
--- /dev/null
+++ b/lerobot/configs/policy/act_pusht.yaml
@@ -0,0 +1,87 @@
+# @package _global_
+
+# Change the seed to match what PushT eval uses
+# (to avoid evaluating on seeds used for generating the training data).
+seed: 100000
+# Change the dataset repository to the PushT one.
+dataset_repo_id: lerobot/pusht
+
+override_dataset_stats:
+ observation.image:
+ # stats from imagenet, since we use a pretrained vision model
+ mean: [[[0.485]], [[0.456]], [[0.406]]] # (c,1,1)
+ std: [[[0.229]], [[0.224]], [[0.225]]] # (c,1,1)
+
+training:
+ offline_steps: 80000
+ online_steps: 0
+ eval_freq: 10000
+ save_freq: 100000
+ log_freq: 250
+ save_model: true
+
+ batch_size: 8
+ lr: 1e-5
+ lr_backbone: 1e-5
+ weight_decay: 1e-4
+ grad_clip_norm: 10
+ online_steps_between_rollouts: 1
+
+ delta_timestamps:
+ action: "[i / ${fps} for i in range(${policy.chunk_size})]"
+
+eval:
+ n_episodes: 50
+ batch_size: 50
+
+# See `configuration_act.py` for more details.
+policy:
+ name: act
+
+ # Input / output structure.
+ n_obs_steps: 1
+ chunk_size: 100 # chunk_size
+ n_action_steps: 100
+
+ input_shapes:
+ observation.image: [3, 96, 96]
+ observation.state: ["${env.state_dim}"]
+ output_shapes:
+ action: ["${env.action_dim}"]
+
+ # Normalization / Unnormalization
+ input_normalization_modes:
+ observation.image: mean_std
+ # Use min_max normalization just because it's more standard.
+ observation.state: min_max
+ output_normalization_modes:
+ # Use min_max normalization just because it's more standard.
+ action: min_max
+
+ # Architecture.
+ # Vision backbone.
+ vision_backbone: resnet18
+ pretrained_backbone_weights: ResNet18_Weights.IMAGENET1K_V1
+ replace_final_stride_with_dilation: false
+ # Transformer layers.
+ pre_norm: false
+ dim_model: 512
+ n_heads: 8
+ dim_feedforward: 3200
+ feedforward_activation: relu
+ n_encoder_layers: 4
+ # Note: Although the original ACT implementation has 7 for `n_decoder_layers`, there is a bug in the code
+ # that means only the first layer is used. Here we match the original implementation by setting this to 1.
+ # See this issue https://github.com/tonyzhaozh/act/issues/25#issue-2258740521.
+ n_decoder_layers: 1
+ # VAE.
+ use_vae: true
+ latent_dim: 32
+ n_vae_encoder_layers: 4
+
+ # Inference.
+ temporal_ensemble_coeff: null
+
+ # Training and loss computation.
+ dropout: 0.1
+ kl_weight: 10.0
diff --git a/lerobot/configs/policy/act_so100_real.yaml b/lerobot/configs/policy/act_so100_real.yaml
new file mode 100644
index 000000000..cf5b1f147
--- /dev/null
+++ b/lerobot/configs/policy/act_so100_real.yaml
@@ -0,0 +1,102 @@
+# @package _global_
+
+# Use `act_koch_real.yaml` to train on real-world datasets collected on Alexander Koch's robots.
+# Compared to `act.yaml`, it contains 2 cameras (i.e. laptop, phone) instead of 1 camera (i.e. top).
+# Also, `training.eval_freq` is set to -1. This config is used to evaluate checkpoints at a certain frequency of training steps.
+# When it is set to -1, it deactivates evaluation. This is because real-world evaluation is done through our `control_robot.py` script.
+# Look at the documentation in header of `control_robot.py` for more information on how to collect data , train and evaluate a policy.
+#
+# Example of usage for training:
+# ```bash
+# python lerobot/scripts/train.py \
+# policy=act_koch_real \
+# env=koch_real
+# ```
+
+seed: 1000
+dataset_repo_id: lerobot/so100_pick_place_lego
+
+override_dataset_stats:
+ observation.images.laptop:
+ # stats from imagenet, since we use a pretrained vision model
+ mean: [[[0.485]], [[0.456]], [[0.406]]] # (c,1,1)
+ std: [[[0.229]], [[0.224]], [[0.225]]] # (c,1,1)
+ observation.images.phone:
+ # stats from imagenet, since we use a pretrained vision model
+ mean: [[[0.485]], [[0.456]], [[0.406]]] # (c,1,1)
+ std: [[[0.229]], [[0.224]], [[0.225]]] # (c,1,1)
+
+training:
+ offline_steps: 80000
+ online_steps: 0
+ eval_freq: -1
+ save_freq: 10000
+ log_freq: 100
+ save_checkpoint: true
+
+ batch_size: 8
+ lr: 1e-5
+ lr_backbone: 1e-5
+ weight_decay: 1e-4
+ grad_clip_norm: 10
+ online_steps_between_rollouts: 1
+
+ delta_timestamps:
+ action: "[i / ${fps} for i in range(${policy.chunk_size})]"
+
+eval:
+ n_episodes: 50
+ batch_size: 50
+
+# See `configuration_act.py` for more details.
+policy:
+ name: act
+
+ # Input / output structure.
+ n_obs_steps: 1
+ chunk_size: 100
+ n_action_steps: 100
+
+ input_shapes:
+ # TODO(rcadene, alexander-soare): add variables for height and width from the dataset/env?
+ observation.images.laptop: [3, 480, 640]
+ observation.images.phone: [3, 480, 640]
+ observation.state: ["${env.state_dim}"]
+ output_shapes:
+ action: ["${env.action_dim}"]
+
+ # Normalization / Unnormalization
+ input_normalization_modes:
+ observation.images.laptop: mean_std
+ observation.images.phone: mean_std
+ observation.state: mean_std
+ output_normalization_modes:
+ action: mean_std
+
+ # Architecture.
+ # Vision backbone.
+ vision_backbone: resnet18
+ pretrained_backbone_weights: ResNet18_Weights.IMAGENET1K_V1
+ replace_final_stride_with_dilation: false
+ # Transformer layers.
+ pre_norm: false
+ dim_model: 512
+ n_heads: 8
+ dim_feedforward: 3200
+ feedforward_activation: relu
+ n_encoder_layers: 4
+ # Note: Although the original ACT implementation has 7 for `n_decoder_layers`, there is a bug in the code
+ # that means only the first layer is used. Here we match the original implementation by setting this to 1.
+ # See this issue https://github.com/tonyzhaozh/act/issues/25#issue-2258740521.
+ n_decoder_layers: 1
+ # VAE.
+ use_vae: true
+ latent_dim: 32
+ n_vae_encoder_layers: 4
+
+ # Inference.
+ temporal_ensemble_coeff: null
+
+ # Training and loss computation.
+ dropout: 0.1
+ kl_weight: 10.0
diff --git a/lerobot/configs/robot/aloha.yaml b/lerobot/configs/robot/aloha.yaml
index 938fa2e3d..d8bca515f 100644
--- a/lerobot/configs/robot/aloha.yaml
+++ b/lerobot/configs/robot/aloha.yaml
@@ -1,11 +1,13 @@
-# Aloha: A Low-Cost Hardware for Bimanual Teleoperation
+# [Aloha: A Low-Cost Hardware for Bimanual Teleoperation](https://www.trossenrobotics.com/aloha-stationary)
# https://aloha-2.github.io
-# https://www.trossenrobotics.com/aloha-stationary
# Requires installing extras packages
# With pip: `pip install -e ".[dynamixel intelrealsense]"`
# With poetry: `poetry install --sync --extras "dynamixel intelrealsense"`
+# See [tutorial](https://github.com/huggingface/lerobot/blob/main/examples/9_use_aloha.md)
+
+
_target_: lerobot.common.robot_devices.robots.manipulator.ManipulatorRobot
robot_type: aloha
# Specific to Aloha, LeRobot comes with default calibration files. Assuming the motors have been
@@ -91,25 +93,25 @@ follower_arms:
cameras:
cam_high:
_target_: lerobot.common.robot_devices.cameras.intelrealsense.IntelRealSenseCamera
- camera_index: 128422271347
+ serial_number: 128422271347
fps: 30
width: 640
height: 480
cam_low:
_target_: lerobot.common.robot_devices.cameras.intelrealsense.IntelRealSenseCamera
- camera_index: 130322270656
+ serial_number: 130322270656
fps: 30
width: 640
height: 480
cam_left_wrist:
_target_: lerobot.common.robot_devices.cameras.intelrealsense.IntelRealSenseCamera
- camera_index: 218622272670
+ serial_number: 218622272670
fps: 30
width: 640
height: 480
cam_right_wrist:
_target_: lerobot.common.robot_devices.cameras.intelrealsense.IntelRealSenseCamera
- camera_index: 130322272300
+ serial_number: 130322272300
fps: 30
width: 640
height: 480
diff --git a/lerobot/configs/robot/koch_bimanual.yaml b/lerobot/configs/robot/koch_bimanual.yaml
index 7f8138675..b551d15de 100644
--- a/lerobot/configs/robot/koch_bimanual.yaml
+++ b/lerobot/configs/robot/koch_bimanual.yaml
@@ -1,5 +1,5 @@
_target_: lerobot.common.robot_devices.robots.manipulator.ManipulatorRobot
-robot_type: koch
+robot_type: koch_bimanual
calibration_dir: .cache/calibration/koch_bimanual
# `max_relative_target` limits the magnitude of the relative positional target vector for safety purposes.
diff --git a/lerobot/configs/robot/moss.yaml b/lerobot/configs/robot/moss.yaml
new file mode 100644
index 000000000..8a9019851
--- /dev/null
+++ b/lerobot/configs/robot/moss.yaml
@@ -0,0 +1,56 @@
+# [Moss v1 robot arm](https://github.com/jess-moss/moss-robot-arms)
+
+# Requires installing extras packages
+# With pip: `pip install -e ".[feetech]"`
+# With poetry: `poetry install --sync --extras "feetech"`
+
+# See [tutorial](https://github.com/huggingface/lerobot/blob/main/examples/11_use_moss.md)
+
+_target_: lerobot.common.robot_devices.robots.manipulator.ManipulatorRobot
+robot_type: moss
+calibration_dir: .cache/calibration/moss
+
+# `max_relative_target` limits the magnitude of the relative positional target vector for safety purposes.
+# Set this to a positive scalar to have the same value for all motors, or a list that is the same length as
+# the number of motors in your follower arms.
+max_relative_target: null
+
+leader_arms:
+ main:
+ _target_: lerobot.common.robot_devices.motors.feetech.FeetechMotorsBus
+ port: /dev/tty.usbmodem58760431091
+ motors:
+ # name: (index, model)
+ shoulder_pan: [1, "sts3215"]
+ shoulder_lift: [2, "sts3215"]
+ elbow_flex: [3, "sts3215"]
+ wrist_flex: [4, "sts3215"]
+ wrist_roll: [5, "sts3215"]
+ gripper: [6, "sts3215"]
+
+follower_arms:
+ main:
+ _target_: lerobot.common.robot_devices.motors.feetech.FeetechMotorsBus
+ port: /dev/tty.usbmodem58760431191
+ motors:
+ # name: (index, model)
+ shoulder_pan: [1, "sts3215"]
+ shoulder_lift: [2, "sts3215"]
+ elbow_flex: [3, "sts3215"]
+ wrist_flex: [4, "sts3215"]
+ wrist_roll: [5, "sts3215"]
+ gripper: [6, "sts3215"]
+
+cameras:
+ laptop:
+ _target_: lerobot.common.robot_devices.cameras.opencv.OpenCVCamera
+ camera_index: 0
+ fps: 30
+ width: 640
+ height: 480
+ phone:
+ _target_: lerobot.common.robot_devices.cameras.opencv.OpenCVCamera
+ camera_index: 1
+ fps: 30
+ width: 640
+ height: 480
diff --git a/lerobot/configs/robot/so100.yaml b/lerobot/configs/robot/so100.yaml
new file mode 100644
index 000000000..ec6f3e3fe
--- /dev/null
+++ b/lerobot/configs/robot/so100.yaml
@@ -0,0 +1,56 @@
+# [SO-100 robot arm](https://github.com/TheRobotStudio/SO-ARM100)
+
+# Requires installing extras packages
+# With pip: `pip install -e ".[feetech]"`
+# With poetry: `poetry install --sync --extras "feetech"`
+
+# See [tutorial](https://github.com/huggingface/lerobot/blob/main/examples/10_use_so100.md)
+
+_target_: lerobot.common.robot_devices.robots.manipulator.ManipulatorRobot
+robot_type: so100
+calibration_dir: .cache/calibration/so100
+
+# `max_relative_target` limits the magnitude of the relative positional target vector for safety purposes.
+# Set this to a positive scalar to have the same value for all motors, or a list that is the same length as
+# the number of motors in your follower arms.
+max_relative_target: null
+
+leader_arms:
+ main:
+ _target_: lerobot.common.robot_devices.motors.feetech.FeetechMotorsBus
+ port: /dev/tty.usbmodem585A0077581
+ motors:
+ # name: (index, model)
+ shoulder_pan: [1, "sts3215"]
+ shoulder_lift: [2, "sts3215"]
+ elbow_flex: [3, "sts3215"]
+ wrist_flex: [4, "sts3215"]
+ wrist_roll: [5, "sts3215"]
+ gripper: [6, "sts3215"]
+
+follower_arms:
+ main:
+ _target_: lerobot.common.robot_devices.motors.feetech.FeetechMotorsBus
+ port: /dev/tty.usbmodem585A0080971
+ motors:
+ # name: (index, model)
+ shoulder_pan: [1, "sts3215"]
+ shoulder_lift: [2, "sts3215"]
+ elbow_flex: [3, "sts3215"]
+ wrist_flex: [4, "sts3215"]
+ wrist_roll: [5, "sts3215"]
+ gripper: [6, "sts3215"]
+
+cameras:
+ laptop:
+ _target_: lerobot.common.robot_devices.cameras.opencv.OpenCVCamera
+ camera_index: 0
+ fps: 30
+ width: 640
+ height: 480
+ phone:
+ _target_: lerobot.common.robot_devices.cameras.opencv.OpenCVCamera
+ camera_index: 1
+ fps: 30
+ width: 640
+ height: 480
diff --git a/lerobot/configs/robot/stretch.yaml b/lerobot/configs/robot/stretch.yaml
new file mode 100644
index 000000000..e29966b6f
--- /dev/null
+++ b/lerobot/configs/robot/stretch.yaml
@@ -0,0 +1,33 @@
+# [Stretch3 from Hello Robot](https://hello-robot.com/stretch-3-product)
+
+# Requires installing extras packages
+# With pip: `pip install -e ".[stretch]"`
+# With poetry: `poetry install --sync --extras "stretch"`
+
+# See [tutorial](https://github.com/huggingface/lerobot/blob/main/examples/8_use_stretch.md)
+
+
+_target_: lerobot.common.robot_devices.robots.stretch.StretchRobot
+robot_type: stretch3
+
+cameras:
+ navigation:
+ _target_: lerobot.common.robot_devices.cameras.opencv.OpenCVCamera
+ camera_index: /dev/hello-nav-head-camera
+ fps: 10
+ width: 1280
+ height: 720
+ rotation: -90
+ head:
+ _target_: lerobot.common.robot_devices.cameras.intelrealsense.IntelRealSenseCamera.init_from_name
+ name: Intel RealSense D435I
+ fps: 30
+ width: 640
+ height: 480
+ rotation: 90
+ wrist:
+ _target_: lerobot.common.robot_devices.cameras.intelrealsense.IntelRealSenseCamera.init_from_name
+ name: Intel RealSense D405
+ fps: 30
+ width: 640
+ height: 480
diff --git a/lerobot/scripts/configure_motor.py b/lerobot/scripts/configure_motor.py
new file mode 100644
index 000000000..18707397f
--- /dev/null
+++ b/lerobot/scripts/configure_motor.py
@@ -0,0 +1,145 @@
+"""
+This script configure a single motor at a time to a given ID and baudrate.
+
+Example of usage:
+```bash
+python lerobot/scripts/configure_motor.py \
+ --port /dev/tty.usbmodem585A0080521 \
+ --brand feetech \
+ --model sts3215 \
+ --baudrate 1000000 \
+ --ID 1
+```
+"""
+
+import argparse
+import time
+
+
+def configure_motor(port, brand, model, motor_idx_des, baudrate_des):
+ if brand == "feetech":
+ from lerobot.common.robot_devices.motors.feetech import MODEL_BAUDRATE_TABLE
+ from lerobot.common.robot_devices.motors.feetech import (
+ SCS_SERIES_BAUDRATE_TABLE as SERIES_BAUDRATE_TABLE,
+ )
+ from lerobot.common.robot_devices.motors.feetech import FeetechMotorsBus as MotorsBusClass
+ elif brand == "dynamixel":
+ from lerobot.common.robot_devices.motors.dynamixel import MODEL_BAUDRATE_TABLE
+ from lerobot.common.robot_devices.motors.dynamixel import (
+ X_SERIES_BAUDRATE_TABLE as SERIES_BAUDRATE_TABLE,
+ )
+ from lerobot.common.robot_devices.motors.dynamixel import DynamixelMotorsBus as MotorsBusClass
+ else:
+ raise ValueError(
+ f"Currently we do not support this motor brand: {brand}. We currently support feetech and dynamixel motors."
+ )
+
+ # Check if the provided model exists in the model_baud_rate_table
+ if model not in MODEL_BAUDRATE_TABLE:
+ raise ValueError(
+ f"Invalid model '{model}' for brand '{brand}'. Supported models: {list(MODEL_BAUDRATE_TABLE.keys())}"
+ )
+
+ # Setup motor names, indices, and models
+ motor_name = "motor"
+ motor_index_arbitrary = motor_idx_des # Use the motor ID passed via argument
+ motor_model = model # Use the motor model passed via argument
+
+ # Initialize the MotorBus with the correct port and motor configurations
+ motor_bus = MotorsBusClass(port=port, motors={motor_name: (motor_index_arbitrary, motor_model)})
+
+ # Try to connect to the motor bus and handle any connection-specific errors
+ try:
+ motor_bus.connect()
+ print(f"Connected on port {motor_bus.port}")
+ except OSError as e:
+ print(f"Error occurred when connecting to the motor bus: {e}")
+ return
+
+ # Motor bus is connected, proceed with the rest of the operations
+ try:
+ print("Scanning all baudrates and motor indices")
+ all_baudrates = set(SERIES_BAUDRATE_TABLE.values())
+ motor_index = -1 # Set the motor index to an out-of-range value.
+
+ for baudrate in all_baudrates:
+ motor_bus.set_bus_baudrate(baudrate)
+ present_ids = motor_bus.find_motor_indices(list(range(1, 10)))
+ if len(present_ids) > 1:
+ raise ValueError(
+ "Error: More than one motor ID detected. This script is designed to only handle one motor at a time. Please disconnect all but one motor."
+ )
+
+ if len(present_ids) == 1:
+ if motor_index != -1:
+ raise ValueError(
+ "Error: More than one motor ID detected. This script is designed to only handle one motor at a time. Please disconnect all but one motor."
+ )
+ motor_index = present_ids[0]
+
+ if motor_index == -1:
+ raise ValueError("No motors detected. Please ensure you have one motor connected.")
+
+ print(f"Motor index found at: {motor_index}")
+
+ if brand == "feetech":
+ # Allows ID and BAUDRATE to be written in memory
+ motor_bus.write_with_motor_ids(motor_bus.motor_models, motor_index, "Lock", 0)
+
+ if baudrate != baudrate_des:
+ print(f"Setting its baudrate to {baudrate_des}")
+ baudrate_idx = list(SERIES_BAUDRATE_TABLE.values()).index(baudrate_des)
+
+ # The write can fail, so we allow retries
+ motor_bus.write_with_motor_ids(motor_bus.motor_models, motor_index, "Baud_Rate", baudrate_idx)
+ time.sleep(0.5)
+ motor_bus.set_bus_baudrate(baudrate_des)
+ present_baudrate_idx = motor_bus.read_with_motor_ids(
+ motor_bus.motor_models, motor_index, "Baud_Rate", num_retry=2
+ )
+
+ if present_baudrate_idx != baudrate_idx:
+ raise OSError("Failed to write baudrate.")
+
+ print(f"Setting its index to desired index {motor_idx_des}")
+ motor_bus.write_with_motor_ids(motor_bus.motor_models, motor_index, "Lock", 0)
+ motor_bus.write_with_motor_ids(motor_bus.motor_models, motor_index, "ID", motor_idx_des)
+
+ present_idx = motor_bus.read_with_motor_ids(motor_bus.motor_models, motor_idx_des, "ID", num_retry=2)
+ if present_idx != motor_idx_des:
+ raise OSError("Failed to write index.")
+
+ if brand == "feetech":
+ # Set Maximum_Acceleration to 254 to speedup acceleration and deceleration of
+ # the motors. Note: this configuration is not in the official STS3215 Memory Table
+ motor_bus.write("Lock", 0)
+ motor_bus.write("Maximum_Acceleration", 254)
+
+ motor_bus.write("Goal_Position", 2048)
+ time.sleep(4)
+ print("Present Position", motor_bus.read("Present_Position"))
+
+ motor_bus.write("Offset", 0)
+ time.sleep(4)
+ print("Offset", motor_bus.read("Offset"))
+
+ except Exception as e:
+ print(f"Error occurred during motor configuration: {e}")
+
+ finally:
+ motor_bus.disconnect()
+ print("Disconnected from motor bus.")
+
+
+if __name__ == "__main__":
+ parser = argparse.ArgumentParser()
+ parser.add_argument("--port", type=str, required=True, help="Motors bus port (e.g. dynamixel,feetech)")
+ parser.add_argument("--brand", type=str, required=True, help="Motor brand (e.g. dynamixel,feetech)")
+ parser.add_argument("--model", type=str, required=True, help="Motor model (e.g. xl330-m077,sts3215)")
+ parser.add_argument("--ID", type=int, required=True, help="Desired ID of the current motor (e.g. 1,2,3)")
+ parser.add_argument(
+ "--baudrate", type=int, default=1000000, help="Desired baudrate for the motor (default: 1000000)"
+ )
+ args = parser.parse_args()
+
+ configure_motor(args.port, args.brand, args.model, args.ID, args.baudrate)
diff --git a/lerobot/scripts/control_robot.py b/lerobot/scripts/control_robot.py
index a6506a3fe..563023f48 100644
--- a/lerobot/scripts/control_robot.py
+++ b/lerobot/scripts/control_robot.py
@@ -29,7 +29,6 @@
```bash
python lerobot/scripts/control_robot.py record \
--fps 30 \
- --root tmp/data \
--repo-id $USER/koch_test \
--num-episodes 1 \
--run-compute-stats 0
@@ -38,7 +37,6 @@
- Visualize dataset:
```bash
python lerobot/scripts/visualize_dataset.py \
- --root tmp/data \
--repo-id $USER/koch_test \
--episode-index 0
```
@@ -47,7 +45,6 @@
```bash
python lerobot/scripts/control_robot.py replay \
--fps 30 \
- --root tmp/data \
--repo-id $USER/koch_test \
--episode 0
```
@@ -57,7 +54,6 @@
```bash
python lerobot/scripts/control_robot.py record \
--fps 30 \
- --root data \
--repo-id $USER/koch_pick_place_lego \
--num-episodes 50 \
--warmup-time-s 2 \
@@ -77,7 +73,7 @@
- Train on this dataset with the ACT policy:
```bash
-DATA_DIR=data python lerobot/scripts/train.py \
+python lerobot/scripts/train.py \
policy=act_koch_real \
env=koch_real \
dataset_repo_id=$USER/koch_pick_place_lego \
@@ -88,7 +84,6 @@
```bash
python lerobot/scripts/control_robot.py record \
--fps 30 \
- --root data \
--repo-id $USER/eval_act_koch_real \
--num-episodes 10 \
--warmup-time-s 2 \
@@ -99,161 +94,51 @@
"""
import argparse
-import concurrent.futures
-import json
import logging
-import os
-import platform
-import shutil
import time
-import traceback
-from contextlib import nullcontext
-from functools import cache
from pathlib import Path
-
-import cv2
-import torch
-import tqdm
-from omegaconf import DictConfig
-from PIL import Image
-from termcolor import colored
+from typing import List
# from safetensors.torch import load_file, save_file
-from lerobot.common.datasets.compute_stats import compute_stats
-from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION, LeRobotDataset
-from lerobot.common.datasets.push_dataset_to_hub.aloha_hdf5_format import to_hf_dataset
-from lerobot.common.datasets.push_dataset_to_hub.utils import concatenate_episodes, get_default_encoding
-from lerobot.common.datasets.utils import calculate_episode_data_index, create_branch
-from lerobot.common.datasets.video_utils import encode_video_frames
-from lerobot.common.policies.factory import make_policy
-from lerobot.common.robot_devices.robots.factory import make_robot
-from lerobot.common.robot_devices.robots.utils import Robot, get_arm_id
-from lerobot.common.robot_devices.utils import busy_wait
-from lerobot.common.utils.utils import get_safe_torch_device, init_hydra_config, init_logging, set_global_seed
-from lerobot.scripts.eval import get_pretrained_policy_path
-from lerobot.scripts.push_dataset_to_hub import (
- push_dataset_card_to_hub,
- push_meta_data_to_hub,
- push_videos_to_hub,
- save_meta_data,
+from lerobot.common.datasets.lerobot_dataset import LeRobotDataset
+from lerobot.common.robot_devices.control_utils import (
+ control_loop,
+ has_method,
+ init_keyboard_listener,
+ init_policy,
+ log_control_info,
+ record_episode,
+ reset_environment,
+ sanity_check_dataset_name,
+ sanity_check_dataset_robot_compatibility,
+ stop_recording,
+ warmup_record,
)
-
-########################################################################################
-# Utilities
-########################################################################################
-
-
-def say(text, blocking=False):
- # Check if mac, linux, or windows.
- if platform.system() == "Darwin":
- cmd = f'say "{text}"'
- elif platform.system() == "Linux":
- cmd = f'spd-say "{text}"'
- elif platform.system() == "Windows":
- cmd = (
- 'PowerShell -Command "Add-Type -AssemblyName System.Speech; '
- f"(New-Object System.Speech.Synthesis.SpeechSynthesizer).Speak('{text}')\""
- )
-
- if not blocking and platform.system() in ["Darwin", "Linux"]:
- # TODO(rcadene): Make it work for Windows
- # Use the ampersand to run command in the background
- cmd += " &"
-
- os.system(cmd)
-
-
-def save_image(img_tensor, key, frame_index, episode_index, videos_dir):
- img = Image.fromarray(img_tensor.numpy())
- path = videos_dir / f"{key}_episode_{episode_index:06d}" / f"frame_{frame_index:06d}.png"
- path.parent.mkdir(parents=True, exist_ok=True)
- img.save(str(path), quality=100)
-
-
-def none_or_int(value):
- if value == "None":
- return None
- return int(value)
-
-
-def log_control_info(robot, dt_s, episode_index=None, frame_index=None, fps=None):
- log_items = []
- if episode_index is not None:
- log_items.append(f"ep:{episode_index}")
- if frame_index is not None:
- log_items.append(f"frame:{frame_index}")
-
- def log_dt(shortname, dt_val_s):
- nonlocal log_items, fps
- info_str = f"{shortname}:{dt_val_s * 1000:5.2f} ({1/ dt_val_s:3.1f}hz)"
- if fps is not None:
- actual_fps = 1 / dt_val_s
- if actual_fps < fps - 1:
- info_str = colored(info_str, "yellow")
- log_items.append(info_str)
-
- # total step time displayed in milliseconds and its frequency
- log_dt("dt", dt_s)
-
- for name in robot.leader_arms:
- key = f"read_leader_{name}_pos_dt_s"
- if key in robot.logs:
- log_dt("dtRlead", robot.logs[key])
-
- for name in robot.follower_arms:
- key = f"write_follower_{name}_goal_pos_dt_s"
- if key in robot.logs:
- log_dt("dtWfoll", robot.logs[key])
-
- key = f"read_follower_{name}_pos_dt_s"
- if key in robot.logs:
- log_dt("dtRfoll", robot.logs[key])
-
- for name in robot.cameras:
- key = f"read_camera_{name}_dt_s"
- if key in robot.logs:
- log_dt(f"dtR{name}", robot.logs[key])
-
- info_str = " ".join(log_items)
- logging.info(info_str)
-
-
-@cache
-def is_headless():
- """Detects if python is running without a monitor."""
- try:
- import pynput # noqa
-
- return False
- except Exception:
- print(
- "Error trying to import pynput. Switching to headless mode. "
- "As a result, the video stream from the cameras won't be shown, "
- "and you won't be able to change the control flow with keyboards. "
- "For more info, see traceback below.\n"
- )
- traceback.print_exc()
- print()
- return True
-
+from lerobot.common.robot_devices.robots.factory import make_robot
+from lerobot.common.robot_devices.robots.utils import Robot
+from lerobot.common.robot_devices.utils import busy_wait, safe_disconnect
+from lerobot.common.utils.utils import init_hydra_config, init_logging, log_say, none_or_int
########################################################################################
# Control modes
########################################################################################
+@safe_disconnect
def calibrate(robot: Robot, arms: list[str] | None):
- available_arms = []
- for name in robot.follower_arms:
- arm_id = get_arm_id(name, "follower")
- available_arms.append(arm_id)
- for name in robot.leader_arms:
- arm_id = get_arm_id(name, "leader")
- available_arms.append(arm_id)
-
- unknown_arms = [arm_id for arm_id in arms if arm_id not in available_arms]
-
- available_arms_str = " ".join(available_arms)
+ # TODO(aliberts): move this code in robots' classes
+ if robot.robot_type.startswith("stretch"):
+ if not robot.is_connected:
+ robot.connect()
+ if not robot.is_homed():
+ robot.home()
+ return
+
+ if arms is None:
+ arms = robot.available_arms
+
+ unknown_arms = [arm_id for arm_id in arms if arm_id not in robot.available_arms]
+ available_arms_str = " ".join(robot.available_arms)
unknown_arms_str = " ".join(unknown_arms)
if arms is None or len(arms) == 0:
@@ -285,430 +170,193 @@ def calibrate(robot: Robot, arms: list[str] | None):
print("Calibration is done! You can now teleoperate and record datasets!")
-def teleoperate(robot: Robot, fps: int | None = None, teleop_time_s: float | None = None):
- # TODO(rcadene): Add option to record logs
- if not robot.is_connected:
- robot.connect()
-
- start_teleop_t = time.perf_counter()
- while True:
- start_loop_t = time.perf_counter()
- robot.teleop_step()
-
- if fps is not None:
- dt_s = time.perf_counter() - start_loop_t
- busy_wait(1 / fps - dt_s)
-
- dt_s = time.perf_counter() - start_loop_t
- log_control_info(robot, dt_s, fps=fps)
-
- if teleop_time_s is not None and time.perf_counter() - start_teleop_t > teleop_time_s:
- break
+@safe_disconnect
+def teleoperate(
+ robot: Robot, fps: int | None = None, teleop_time_s: float | None = None, display_cameras: bool = False
+):
+ control_loop(
+ robot,
+ control_time_s=teleop_time_s,
+ fps=fps,
+ teleoperate=True,
+ display_cameras=display_cameras,
+ )
+@safe_disconnect
def record(
robot: Robot,
- policy: torch.nn.Module | None = None,
- hydra_cfg: DictConfig | None = None,
+ root: Path,
+ repo_id: str,
+ single_task: str,
+ pretrained_policy_name_or_path: str | None = None,
+ policy_overrides: List[str] | None = None,
fps: int | None = None,
- root="data",
- repo_id="lerobot/debug",
- warmup_time_s=2,
- episode_time_s=10,
- reset_time_s=5,
- num_episodes=50,
- video=True,
- run_compute_stats=True,
- push_to_hub=True,
- tags=None,
- num_image_writers_per_camera=4,
- force_override=False,
-):
+ warmup_time_s: int | float = 2,
+ episode_time_s: int | float = 10,
+ reset_time_s: int | float = 5,
+ num_episodes: int = 50,
+ video: bool = True,
+ run_compute_stats: bool = True,
+ push_to_hub: bool = True,
+ tags: list[str] | None = None,
+ num_image_writer_processes: int = 0,
+ num_image_writer_threads_per_camera: int = 4,
+ display_cameras: bool = True,
+ play_sounds: bool = True,
+ resume: bool = False,
+ # TODO(rcadene, aliberts): remove local_files_only when refactor with dataset as argument
+ local_files_only: bool = False,
+) -> LeRobotDataset:
# TODO(rcadene): Add option to record logs
- # TODO(rcadene): Clean this function via decomposition in higher level functions
-
- _, dataset_name = repo_id.split("/")
- if dataset_name.startswith("eval_") and policy is None:
- raise ValueError(
- f"Your dataset name begins by 'eval_' ({dataset_name}) but no policy is provided ({policy})."
+ listener = None
+ events = None
+ policy = None
+ device = None
+ use_amp = None
+
+ if single_task:
+ task = single_task
+ else:
+ raise NotImplementedError("Only single-task recording is supported for now")
+
+ # Load pretrained policy
+ if pretrained_policy_name_or_path is not None:
+ policy, policy_fps, device, use_amp = init_policy(pretrained_policy_name_or_path, policy_overrides)
+
+ if fps is None:
+ fps = policy_fps
+ logging.warning(f"No fps provided, so using the fps from policy config ({policy_fps}).")
+ elif fps != policy_fps:
+ logging.warning(
+ f"There is a mismatch between the provided fps ({fps}) and the one from policy config ({policy_fps})."
+ )
+
+ if resume:
+ dataset = LeRobotDataset(
+ repo_id,
+ root=root,
+ local_files_only=local_files_only,
+ )
+ dataset.start_image_writer(
+ num_processes=num_image_writer_processes,
+ num_threads=num_image_writer_threads_per_camera * len(robot.cameras),
+ )
+ sanity_check_dataset_robot_compatibility(dataset, robot, fps, video)
+ else:
+ # Create empty dataset or load existing saved episodes
+ sanity_check_dataset_name(repo_id, policy)
+ dataset = LeRobotDataset.create(
+ repo_id,
+ fps,
+ root=root,
+ robot=robot,
+ use_videos=video,
+ image_writer_processes=num_image_writer_processes,
+ image_writer_threads=num_image_writer_threads_per_camera * len(robot.cameras),
)
-
- if not video:
- raise NotImplementedError()
if not robot.is_connected:
robot.connect()
- local_dir = Path(root) / repo_id
- if local_dir.exists() and force_override:
- shutil.rmtree(local_dir)
+ listener, events = init_keyboard_listener()
- episodes_dir = local_dir / "episodes"
- episodes_dir.mkdir(parents=True, exist_ok=True)
+ # Execute a few seconds without recording to:
+ # 1. teleoperate the robot to move it in starting position if no policy provided,
+ # 2. give times to the robot devices to connect and start synchronizing,
+ # 3. place the cameras windows on screen
+ enable_teleoperation = policy is None
+ log_say("Warmup record", play_sounds)
+ warmup_record(robot, events, enable_teleoperation, warmup_time_s, display_cameras, fps)
- videos_dir = local_dir / "videos"
- videos_dir.mkdir(parents=True, exist_ok=True)
+ if has_method(robot, "teleop_safety_stop"):
+ robot.teleop_safety_stop()
- # Logic to resume data recording
- rec_info_path = episodes_dir / "data_recording_info.json"
- if rec_info_path.exists():
- with open(rec_info_path) as f:
- rec_info = json.load(f)
- episode_index = rec_info["last_episode_index"] + 1
- else:
- episode_index = 0
+ recorded_episodes = 0
+ while True:
+ if recorded_episodes >= num_episodes:
+ break
- if is_headless():
- logging.info(
- "Headless environment detected. On-screen cameras display and keyboard inputs will not be available."
+ # TODO(aliberts): add task prompt for multitask here. Might need to temporarily disable event if
+ # input() messes with them.
+ # if multi_task:
+ # task = input("Enter your task description: ")
+
+ log_say(f"Recording episode {dataset.num_episodes}", play_sounds)
+ record_episode(
+ dataset=dataset,
+ robot=robot,
+ events=events,
+ episode_time_s=episode_time_s,
+ display_cameras=display_cameras,
+ policy=policy,
+ device=device,
+ use_amp=use_amp,
+ fps=fps,
)
- # Allow to exit early while recording an episode or resetting the environment,
- # by tapping the right arrow key '->'. This might require a sudo permission
- # to allow your terminal to monitor keyboard events.
- exit_early = False
- rerecord_episode = False
- stop_recording = False
-
- # Only import pynput if not in a headless environment
- if not is_headless():
- from pynput import keyboard
-
- def on_press(key):
- nonlocal exit_early, rerecord_episode, stop_recording
- try:
- if key == keyboard.Key.right:
- print("Right arrow key pressed. Exiting loop...")
- exit_early = True
- elif key == keyboard.Key.left:
- print("Left arrow key pressed. Exiting loop and rerecord the last episode...")
- rerecord_episode = True
- exit_early = True
- elif key == keyboard.Key.esc:
- print("Escape key pressed. Stopping data recording...")
- stop_recording = True
- exit_early = True
- except Exception as e:
- print(f"Error handling key press: {e}")
-
- listener = keyboard.Listener(on_press=on_press)
- listener.start()
-
- # Load policy if any
- if policy is not None:
- # Check device is available
- device = get_safe_torch_device(hydra_cfg.device, log=True)
-
- policy.eval()
- policy.to(device)
-
- torch.backends.cudnn.benchmark = True
- torch.backends.cuda.matmul.allow_tf32 = True
- set_global_seed(hydra_cfg.seed)
-
- # override fps using policy fps
- fps = hydra_cfg.env.fps
-
- # Execute a few seconds without recording data, to give times
- # to the robot devices to connect and start synchronizing.
- timestamp = 0
- start_warmup_t = time.perf_counter()
- is_warmup_print = False
- while timestamp < warmup_time_s:
- if not is_warmup_print:
- logging.info("Warming up (no data recording)")
- say("Warming up")
- is_warmup_print = True
-
- start_loop_t = time.perf_counter()
-
- if policy is None:
- observation, action = robot.teleop_step(record_data=True)
- else:
- observation = robot.capture_observation()
-
- if not is_headless():
- image_keys = [key for key in observation if "image" in key]
- for key in image_keys:
- cv2.imshow(key, cv2.cvtColor(observation[key].numpy(), cv2.COLOR_RGB2BGR))
- cv2.waitKey(1)
-
- dt_s = time.perf_counter() - start_loop_t
- busy_wait(1 / fps - dt_s)
-
- dt_s = time.perf_counter() - start_loop_t
- log_control_info(robot, dt_s, fps=fps)
+ # Execute a few seconds without recording to give time to manually reset the environment
+ # Current code logic doesn't allow to teleoperate during this time.
+ # TODO(rcadene): add an option to enable teleoperation during reset
+ # Skip reset for the last episode to be recorded
+ if not events["stop_recording"] and (
+ (dataset.num_episodes < num_episodes - 1) or events["rerecord_episode"]
+ ):
+ log_say("Reset the environment", play_sounds)
+ reset_environment(robot, events, reset_time_s)
+
+ if events["rerecord_episode"]:
+ log_say("Re-record episode", play_sounds)
+ events["rerecord_episode"] = False
+ events["exit_early"] = False
+ dataset.clear_episode_buffer()
+ continue
+
+ dataset.save_episode(task)
+ recorded_episodes += 1
+
+ if events["stop_recording"]:
+ break
- timestamp = time.perf_counter() - start_warmup_t
-
- # Save images using threads to reach high fps (30 and more)
- # Using `with` to exist smoothly if an execption is raised.
- futures = []
- num_image_writers = num_image_writers_per_camera * len(robot.cameras)
- with concurrent.futures.ThreadPoolExecutor(max_workers=num_image_writers) as executor:
- # Start recording all episodes
- while episode_index < num_episodes:
- logging.info(f"Recording episode {episode_index}")
- say(f"Recording episode {episode_index}")
- ep_dict = {}
- frame_index = 0
- timestamp = 0
- start_episode_t = time.perf_counter()
- while timestamp < episode_time_s:
- start_loop_t = time.perf_counter()
-
- if policy is None:
- observation, action = robot.teleop_step(record_data=True)
- else:
- observation = robot.capture_observation()
-
- image_keys = [key for key in observation if "image" in key]
- not_image_keys = [key for key in observation if "image" not in key]
-
- for key in image_keys:
- futures += [
- executor.submit(
- save_image, observation[key], key, frame_index, episode_index, videos_dir
- )
- ]
-
- if not is_headless():
- image_keys = [key for key in observation if "image" in key]
- for key in image_keys:
- cv2.imshow(key, cv2.cvtColor(observation[key].numpy(), cv2.COLOR_RGB2BGR))
- cv2.waitKey(1)
-
- for key in not_image_keys:
- if key not in ep_dict:
- ep_dict[key] = []
- ep_dict[key].append(observation[key])
-
- if policy is not None:
- with (
- torch.inference_mode(),
- torch.autocast(device_type=device.type)
- if device.type == "cuda" and hydra_cfg.use_amp
- else nullcontext(),
- ):
- # Convert to pytorch format: channel first and float32 in [0,1] with batch dimension
- for name in observation:
- if "image" in name:
- observation[name] = observation[name].type(torch.float32) / 255
- observation[name] = observation[name].permute(2, 0, 1).contiguous()
- observation[name] = observation[name].unsqueeze(0)
- observation[name] = observation[name].to(device)
-
- # Compute the next action with the policy
- # based on the current observation
- action = policy.select_action(observation)
-
- # Remove batch dimension
- action = action.squeeze(0)
-
- # Move to cpu, if not already the case
- action = action.to("cpu")
-
- # Order the robot to move
- action_sent = robot.send_action(action)
-
- # Action can eventually be clipped using `max_relative_target`,
- # so action actually sent is saved in the dataset.
- action = {"action": action_sent}
-
- for key in action:
- if key not in ep_dict:
- ep_dict[key] = []
- ep_dict[key].append(action[key])
-
- frame_index += 1
-
- dt_s = time.perf_counter() - start_loop_t
- busy_wait(1 / fps - dt_s)
-
- dt_s = time.perf_counter() - start_loop_t
- log_control_info(robot, dt_s, fps=fps)
-
- timestamp = time.perf_counter() - start_episode_t
- if exit_early:
- exit_early = False
- break
-
- if not stop_recording:
- # Start resetting env while the executor are finishing
- logging.info("Reset the environment")
- say("Reset the environment")
-
- timestamp = 0
- start_vencod_t = time.perf_counter()
-
- # During env reset we save the data and encode the videos
- num_frames = frame_index
-
- for key in image_keys:
- tmp_imgs_dir = videos_dir / f"{key}_episode_{episode_index:06d}"
- fname = f"{key}_episode_{episode_index:06d}.mp4"
- video_path = local_dir / "videos" / fname
- if video_path.exists():
- video_path.unlink()
- # Store the reference to the video frame, even tho the videos are not yet encoded
- ep_dict[key] = []
- for i in range(num_frames):
- ep_dict[key].append({"path": f"videos/{fname}", "timestamp": i / fps})
-
- for key in not_image_keys:
- ep_dict[key] = torch.stack(ep_dict[key])
-
- for key in action:
- ep_dict[key] = torch.stack(ep_dict[key])
-
- ep_dict["episode_index"] = torch.tensor([episode_index] * num_frames)
- ep_dict["frame_index"] = torch.arange(0, num_frames, 1)
- ep_dict["timestamp"] = torch.arange(0, num_frames, 1) / fps
-
- done = torch.zeros(num_frames, dtype=torch.bool)
- done[-1] = True
- ep_dict["next.done"] = done
-
- ep_path = episodes_dir / f"episode_{episode_index}.pth"
- print("Saving episode dictionary...")
- torch.save(ep_dict, ep_path)
-
- rec_info = {
- "last_episode_index": episode_index,
- }
- with open(rec_info_path, "w") as f:
- json.dump(rec_info, f)
-
- is_last_episode = stop_recording or (episode_index == (num_episodes - 1))
-
- # Wait if necessary
- with tqdm.tqdm(total=reset_time_s, desc="Waiting") as pbar:
- while timestamp < reset_time_s and not is_last_episode:
- time.sleep(1)
- timestamp = time.perf_counter() - start_vencod_t
- pbar.update(1)
- if exit_early:
- exit_early = False
- break
-
- # Skip updating episode index which forces re-recording episode
- if rerecord_episode:
- rerecord_episode = False
- continue
-
- episode_index += 1
-
- if is_last_episode:
- logging.info("Done recording")
- say("Done recording", blocking=True)
- if not is_headless():
- listener.stop()
-
- logging.info("Waiting for threads writing the images on disk to terminate...")
- for _ in tqdm.tqdm(
- concurrent.futures.as_completed(futures), total=len(futures), desc="Writting images"
- ):
- pass
- break
+ log_say("Stop recording", play_sounds, blocking=True)
+ stop_recording(robot, listener, display_cameras)
- robot.disconnect()
- if not is_headless():
- cv2.destroyAllWindows()
-
- num_episodes = episode_index
-
- logging.info("Encoding videos")
- say("Encoding videos")
- # Use ffmpeg to convert frames stored as png into mp4 videos
- for episode_index in tqdm.tqdm(range(num_episodes)):
- for key in image_keys:
- tmp_imgs_dir = videos_dir / f"{key}_episode_{episode_index:06d}"
- fname = f"{key}_episode_{episode_index:06d}.mp4"
- video_path = local_dir / "videos" / fname
- if video_path.exists():
- # Skip if video is already encoded. Could be the case when resuming data recording.
- continue
- # note: `encode_video_frames` is a blocking call. Making it asynchronous shouldn't speedup encoding,
- # since video encoding with ffmpeg is already using multithreading.
- encode_video_frames(tmp_imgs_dir, video_path, fps, overwrite=True)
- shutil.rmtree(tmp_imgs_dir)
-
- logging.info("Concatenating episodes")
- ep_dicts = []
- for episode_index in tqdm.tqdm(range(num_episodes)):
- ep_path = episodes_dir / f"episode_{episode_index}.pth"
- ep_dict = torch.load(ep_path)
- ep_dicts.append(ep_dict)
- data_dict = concatenate_episodes(ep_dicts)
-
- total_frames = data_dict["frame_index"].shape[0]
- data_dict["index"] = torch.arange(0, total_frames, 1)
-
- hf_dataset = to_hf_dataset(data_dict, video)
- episode_data_index = calculate_episode_data_index(hf_dataset)
- info = {
- "codebase_version": CODEBASE_VERSION,
- "fps": fps,
- "video": video,
- }
- if video:
- info["encoding"] = get_default_encoding()
-
- lerobot_dataset = LeRobotDataset.from_preloaded(
- repo_id=repo_id,
- hf_dataset=hf_dataset,
- episode_data_index=episode_data_index,
- info=info,
- videos_dir=videos_dir,
- )
if run_compute_stats:
logging.info("Computing dataset statistics")
- say("Computing dataset statistics")
- stats = compute_stats(lerobot_dataset)
- lerobot_dataset.stats = stats
- else:
- stats = {}
- logging.info("Skipping computation of the dataset statistics")
-
- hf_dataset = hf_dataset.with_format(None) # to remove transforms that cant be saved
- hf_dataset.save_to_disk(str(local_dir / "train"))
- meta_data_dir = local_dir / "meta_data"
- save_meta_data(info, stats, episode_data_index, meta_data_dir)
+ dataset.consolidate(run_compute_stats)
if push_to_hub:
- hf_dataset.push_to_hub(repo_id, revision="main")
- push_meta_data_to_hub(repo_id, meta_data_dir, revision="main")
- push_dataset_card_to_hub(repo_id, revision="main", tags=tags)
- if video:
- push_videos_to_hub(repo_id, videos_dir, revision="main")
- create_branch(repo_id, repo_type="dataset", branch=CODEBASE_VERSION)
+ dataset.push_to_hub(tags=tags)
- logging.info("Exiting")
- say("Exiting")
- return lerobot_dataset
+ log_say("Exiting", play_sounds)
+ return dataset
-def replay(robot: Robot, episode: int, fps: int | None = None, root="data", repo_id="lerobot/debug"):
+@safe_disconnect
+def replay(
+ robot: Robot,
+ root: Path,
+ repo_id: str,
+ episode: int,
+ fps: int | None = None,
+ play_sounds: bool = True,
+ local_files_only: bool = True,
+):
+ # TODO(rcadene, aliberts): refactor with control_loop, once `dataset` is an instance of LeRobotDataset
# TODO(rcadene): Add option to record logs
- local_dir = Path(root) / repo_id
- if not local_dir.exists():
- raise ValueError(local_dir)
- dataset = LeRobotDataset(repo_id, root=root)
- items = dataset.hf_dataset.select_columns("action")
- from_idx = dataset.episode_data_index["from"][episode].item()
- to_idx = dataset.episode_data_index["to"][episode].item()
+ dataset = LeRobotDataset(repo_id, root=root, episodes=[episode], local_files_only=local_files_only)
+ actions = dataset.hf_dataset.select_columns("action")
if not robot.is_connected:
robot.connect()
- logging.info("Replaying episode")
- say("Replaying episode", blocking=True)
- for idx in range(from_idx, to_idx):
+ log_say("Replaying episode", play_sounds, blocking=True)
+ for idx in range(dataset.num_frames):
start_episode_t = time.perf_counter()
- action = items[idx]["action"]
+ action = actions[idx]["action"]
robot.send_action(action)
dt_s = time.perf_counter() - start_episode_t
@@ -749,15 +397,33 @@ def replay(robot: Robot, episode: int, fps: int | None = None, root="data", repo
parser_teleop.add_argument(
"--fps", type=none_or_int, default=None, help="Frames per second (set to None to disable)"
)
+ parser_teleop.add_argument(
+ "--display-cameras",
+ type=int,
+ default=1,
+ help="Display all cameras on screen (set to 1 to display or 0).",
+ )
parser_record = subparsers.add_parser("record", parents=[base_parser])
+ task_args = parser_record.add_mutually_exclusive_group(required=True)
parser_record.add_argument(
"--fps", type=none_or_int, default=None, help="Frames per second (set to None to disable)"
)
+ task_args.add_argument(
+ "--single-task",
+ type=str,
+ help="A short but accurate description of the task performed during the recording.",
+ )
+ # TODO(aliberts): add multi-task support
+ # task_args.add_argument(
+ # "--multi-task",
+ # type=int,
+ # help="You will need to enter the task performed at the start of each episode.",
+ # )
parser_record.add_argument(
"--root",
type=Path,
- default="data",
+ default=None,
help="Root directory where the dataset will be stored locally at '{root}/{repo_id}' (e.g. 'data/hf_username/dataset_name').",
)
parser_record.add_argument(
@@ -804,20 +470,31 @@ def replay(robot: Robot, episode: int, fps: int | None = None, root="data", repo
help="Add tags to your dataset on the hub.",
)
parser_record.add_argument(
- "--num-image-writers-per-camera",
+ "--num-image-writer-processes",
+ type=int,
+ default=0,
+ help=(
+ "Number of subprocesses handling the saving of frames as PNGs. Set to 0 to use threads only; "
+ "set to ≥1 to use subprocesses, each using threads to write images. The best number of processes "
+ "and threads depends on your system. We recommend 4 threads per camera with 0 processes. "
+ "If fps is unstable, adjust the thread count. If still unstable, try using 1 or more subprocesses."
+ ),
+ )
+ parser_record.add_argument(
+ "--num-image-writer-threads-per-camera",
type=int,
default=4,
help=(
"Number of threads writing the frames as png images on disk, per camera. "
- "Too much threads might cause unstable teleoperation fps due to main thread being blocked. "
+ "Too many threads might cause unstable teleoperation fps due to main thread being blocked. "
"Not enough threads might cause low camera fps."
),
)
parser_record.add_argument(
- "--force-override",
+ "--resume",
type=int,
default=0,
- help="By default, data recording is resumed. When set to 1, delete the local directory and start data recording from scratch.",
+ help="Resume recording on an existing dataset.",
)
parser_record.add_argument(
"-p",
@@ -842,7 +519,7 @@ def replay(robot: Robot, episode: int, fps: int | None = None, root="data", repo
parser_replay.add_argument(
"--root",
type=Path,
- default="data",
+ default=None,
help="Root directory where the dataset will be stored locally at '{root}/{repo_id}' (e.g. 'data/hf_username/dataset_name').",
)
parser_replay.add_argument(
@@ -875,19 +552,7 @@ def replay(robot: Robot, episode: int, fps: int | None = None, root="data", repo
teleoperate(robot, **kwargs)
elif control_mode == "record":
- pretrained_policy_name_or_path = args.pretrained_policy_name_or_path
- policy_overrides = args.policy_overrides
- del kwargs["pretrained_policy_name_or_path"]
- del kwargs["policy_overrides"]
-
- policy_cfg = None
- if pretrained_policy_name_or_path is not None:
- pretrained_policy_path = get_pretrained_policy_path(pretrained_policy_name_or_path)
- policy_cfg = init_hydra_config(pretrained_policy_path / "config.yaml", policy_overrides)
- policy = make_policy(hydra_cfg=policy_cfg, pretrained_policy_name_or_path=pretrained_policy_path)
- record(robot, policy, policy_cfg, **kwargs)
- else:
- record(robot, **kwargs)
+ record(robot, **kwargs)
elif control_mode == "replay":
replay(robot, **kwargs)
diff --git a/lerobot/scripts/eval.py b/lerobot/scripts/eval.py
index 0aec84720..cb4c1065e 100644
--- a/lerobot/scripts/eval.py
+++ b/lerobot/scripts/eval.py
@@ -75,6 +75,7 @@
init_hydra_config,
init_logging,
inside_slurm,
+ is_launched_with_accelerate,
set_global_seed,
)
@@ -122,7 +123,6 @@ def rollout(
# Reset the policy and environments.
policy.reset()
-
observation, info = env.reset(seed=seeds)
if render_callback is not None:
render_callback(env)
@@ -447,6 +447,7 @@ def main(
hydra_cfg_path: str | None = None,
out_dir: str | None = None,
config_overrides: list[str] | None = None,
+ accelerator: Callable = None,
):
assert (pretrained_policy_path is None) ^ (hydra_cfg_path is None)
if pretrained_policy_path is not None:
@@ -468,11 +469,11 @@ def main(
out_dir = f"outputs/eval/{dt.now().strftime('%Y-%m-%d/%H-%M-%S')}_{hydra_cfg.env.name}_{hydra_cfg.policy.name}"
# Check device is available
- device = get_safe_torch_device(hydra_cfg.device, log=True)
+ device = get_safe_torch_device(hydra_cfg.device, log=True, accelerator=accelerator)
torch.backends.cudnn.benchmark = True
torch.backends.cuda.matmul.allow_tf32 = True
- set_global_seed(hydra_cfg.seed)
+ set_global_seed(hydra_cfg.seed, accelerator=accelerator)
log_output_dir(out_dir)
@@ -484,17 +485,25 @@ def main(
policy = make_policy(hydra_cfg=hydra_cfg, pretrained_policy_name_or_path=str(pretrained_policy_path))
else:
# Note: We need the dataset stats to pass to the policy's normalization modules.
- policy = make_policy(hydra_cfg=hydra_cfg, dataset_stats=make_dataset(hydra_cfg).stats)
+ policy = make_policy(hydra_cfg=hydra_cfg, dataset_stats=make_dataset(hydra_cfg).meta.stats)
assert isinstance(policy, nn.Module)
policy.eval()
- with torch.no_grad(), torch.autocast(device_type=device.type) if hydra_cfg.use_amp else nullcontext():
+ if accelerator:
+ policy = accelerator.prepare_model(policy).to(device)
+
+ with (
+ torch.no_grad(),
+ torch.autocast(device_type=device.type)
+ if hydra_cfg.use_amp and accelerator is None
+ else nullcontext(),
+ ):
info = eval_policy(
env,
- policy,
+ policy if accelerator is None else accelerator.unwrap_model(policy, keep_fp32_wrapper=True),
hydra_cfg.eval.n_episodes,
- max_episodes_rendered=10,
+ max_episodes_rendered=hydra_cfg.eval.max_episodes_rendered,
videos_dir=Path(out_dir) / "videos",
start_seed=hydra_cfg.seed,
)
@@ -576,9 +585,19 @@ def get_pretrained_policy_path(pretrained_policy_name_or_path, revision=None):
pretrained_policy_path = get_pretrained_policy_path(
args.pretrained_policy_name_or_path, revision=args.revision
)
-
- main(
- pretrained_policy_path=pretrained_policy_path,
- out_dir=args.out_dir,
- config_overrides=args.overrides,
- )
+ if is_launched_with_accelerate():
+ import accelerate
+
+ accelerator = accelerate.Accelerator()
+ main(
+ pretrained_policy_path=pretrained_policy_path,
+ out_dir=args.out_dir,
+ config_overrides=args.overrides,
+ accelerator=accelerator,
+ )
+ else:
+ main(
+ pretrained_policy_path=pretrained_policy_path,
+ out_dir=args.out_dir,
+ config_overrides=args.overrides,
+ )
diff --git a/lerobot/scripts/find_motors_bus_port.py b/lerobot/scripts/find_motors_bus_port.py
new file mode 100644
index 000000000..67b92ad7d
--- /dev/null
+++ b/lerobot/scripts/find_motors_bus_port.py
@@ -0,0 +1,42 @@
+import os
+import time
+from pathlib import Path
+
+from serial.tools import list_ports # Part of pyserial library
+
+
+def find_available_ports():
+ if os.name == "nt": # Windows
+ # List COM ports using pyserial
+ ports = [port.device for port in list_ports.comports()]
+ else: # Linux/macOS
+ # List /dev/tty* ports for Unix-based systems
+ ports = [str(path) for path in Path("/dev").glob("tty*")]
+ return ports
+
+
+def find_port():
+ print("Finding all available ports for the MotorsBus.")
+ ports_before = find_available_ports()
+ print("Ports before disconnecting:", ports_before)
+
+ print("Remove the USB cable from your MotorsBus and press Enter when done.")
+ input() # Wait for user to disconnect the device
+
+ time.sleep(0.5) # Allow some time for port to be released
+ ports_after = find_available_ports()
+ ports_diff = list(set(ports_before) - set(ports_after))
+
+ if len(ports_diff) == 1:
+ port = ports_diff[0]
+ print(f"The port of this MotorsBus is '{port}'")
+ print("Reconnect the USB cable.")
+ elif len(ports_diff) == 0:
+ raise OSError(f"Could not detect the port. No difference was found ({ports_diff}).")
+ else:
+ raise OSError(f"Could not detect the port. More than one port was found ({ports_diff}).")
+
+
+if __name__ == "__main__":
+ # Helper to find the USB port associated with your MotorsBus.
+ find_port()
diff --git a/lerobot/scripts/push_dataset_to_hub.py b/lerobot/scripts/push_dataset_to_hub.py
index adc4c72ad..2bb641a4d 100644
--- a/lerobot/scripts/push_dataset_to_hub.py
+++ b/lerobot/scripts/push_dataset_to_hub.py
@@ -117,10 +117,14 @@ def push_meta_data_to_hub(repo_id: str, meta_data_dir: str | Path, revision: str
def push_dataset_card_to_hub(
- repo_id: str, revision: str | None, tags: list | None = None, text: str | None = None
+ repo_id: str,
+ revision: str | None,
+ tags: list | None = None,
+ license: str = "apache-2.0",
+ **card_kwargs,
):
"""Creates and pushes a LeRobotDataset Card with appropriate tags to easily find it on the hub."""
- card = create_lerobot_dataset_card(tags=tags, text=text)
+ card = create_lerobot_dataset_card(tags=tags, license=license, **card_kwargs)
card.push_to_hub(repo_id=repo_id, repo_type="dataset", revision=revision)
@@ -260,7 +264,7 @@ def push_dataset_to_hub(
episode_index = 0
tests_videos_dir = tests_data_dir / repo_id / "videos"
tests_videos_dir.mkdir(parents=True, exist_ok=True)
- for key in lerobot_dataset.video_frame_keys:
+ for key in lerobot_dataset.camera_keys:
fname = f"{key}_episode_{episode_index:06d}.mp4"
shutil.copy(videos_dir / fname, tests_videos_dir / fname)
diff --git a/lerobot/scripts/train.py b/lerobot/scripts/train.py
index 45807503f..0d23eee6f 100644
--- a/lerobot/scripts/train.py
+++ b/lerobot/scripts/train.py
@@ -21,6 +21,7 @@
from pathlib import Path
from pprint import pformat
from threading import Lock
+from typing import Callable
import hydra
import numpy as np
@@ -43,10 +44,13 @@
from lerobot.common.policies.utils import get_device_from_parameters
from lerobot.common.utils.utils import (
format_big_number,
+ get_accelerate_config,
get_safe_torch_device,
init_hydra_config,
init_logging,
+ is_launched_with_accelerate,
set_global_seed,
+ update_omegaconf,
)
from lerobot.scripts.eval import eval_policy
@@ -84,12 +88,15 @@ def make_optimizer_and_scheduler(cfg, policy):
)
from diffusers.optimization import get_scheduler
- lr_scheduler = get_scheduler(
- cfg.training.lr_scheduler,
- optimizer=optimizer,
- num_warmup_steps=cfg.training.lr_warmup_steps,
- num_training_steps=cfg.training.offline_steps,
- )
+ if cfg.training.lr_scheduler:
+ lr_scheduler = get_scheduler(
+ cfg.training.lr_scheduler,
+ optimizer=optimizer,
+ num_warmup_steps=cfg.training.lr_warmup_steps,
+ num_training_steps=cfg.training.offline_steps,
+ )
+ else:
+ lr_scheduler = None
elif policy.name == "tdmpc":
optimizer = torch.optim.Adam(policy.parameters(), cfg.training.lr)
lr_scheduler = None
@@ -113,32 +120,42 @@ def update_policy(
lr_scheduler=None,
use_amp: bool = False,
lock=None,
+ accelerator: Callable = None,
):
"""Returns a dictionary of items for logging."""
start_time = time.perf_counter()
device = get_device_from_parameters(policy)
policy.train()
- with torch.autocast(device_type=device.type) if use_amp else nullcontext():
+ with torch.autocast(device_type=device.type) if use_amp and accelerator is None else nullcontext():
output_dict = policy.forward(batch)
# TODO(rcadene): policy.unnormalize_outputs(out_dict)
loss = output_dict["loss"]
- grad_scaler.scale(loss).backward()
-
- # Unscale the graident of the optimzer's assigned params in-place **prior to gradient clipping**.
- grad_scaler.unscale_(optimizer)
-
- grad_norm = torch.nn.utils.clip_grad_norm_(
- policy.parameters(),
- grad_clip_norm,
- error_if_nonfinite=False,
- )
+ if accelerator:
+ accelerator.backward(loss)
+ accelerator.unscale_gradients(optimizer=optimizer)
+ grad_norm = torch.nn.utils.clip_grad_norm_(
+ policy.parameters(),
+ grad_clip_norm,
+ error_if_nonfinite=False,
+ )
+ optimizer.step()
+ else:
+ grad_scaler.scale(loss).backward()
+ # Unscale the graident of the optimzer's assigned params in-place **prior to gradient clipping**.
+ grad_scaler.unscale_(optimizer)
+
+ grad_norm = torch.nn.utils.clip_grad_norm_(
+ policy.parameters(),
+ grad_clip_norm,
+ error_if_nonfinite=False,
+ )
- # Optimizer's gradients are already unscaled, so scaler.step does not unscale them,
- # although it still skips optimizer.step() if the gradients contain infs or NaNs.
- with lock if lock is not None else nullcontext():
- grad_scaler.step(optimizer)
- # Updates the scale for next iteration.
- grad_scaler.update()
+ # Optimizer's gradients are already unscaled, so scaler.step does not unscale them,
+ # although it still skips optimizer.step() if the gradients contain infs or NaNs.
+ with lock if lock is not None else nullcontext():
+ grad_scaler.step(optimizer)
+ # Updates the scale for next iteration.
+ grad_scaler.update()
optimizer.zero_grad()
@@ -147,7 +164,10 @@ def update_policy(
if isinstance(policy, PolicyWithUpdate):
# To possibly update an internal buffer (for instance an Exponential Moving Average like in TDMPC).
- policy.update()
+ if accelerator:
+ accelerator.unwrap_model(policy, keep_fp32_wrapper=True).update()
+ else:
+ policy.update()
info = {
"loss": loss.item(),
@@ -161,7 +181,7 @@ def update_policy(
return info
-def log_train_info(logger: Logger, info, step, cfg, dataset, is_online):
+def log_train_info(logger: Logger, info, step, cfg, dataset, is_online, accelerator: Callable = None):
loss = info["loss"]
grad_norm = info["grad_norm"]
lr = info["lr"]
@@ -170,10 +190,10 @@ def log_train_info(logger: Logger, info, step, cfg, dataset, is_online):
# A sample is an (observation,action) pair, where observation and action
# can be on multiple timestamps. In a batch, we have `batch_size`` number of samples.
- num_samples = (step + 1) * cfg.training.batch_size
- avg_samples_per_ep = dataset.num_samples / dataset.num_episodes
+ num_samples = (step + 1) * cfg.training.batch_size * (accelerator.num_processes if accelerator else 1)
+ avg_samples_per_ep = dataset.num_frames / dataset.num_episodes
num_episodes = num_samples / avg_samples_per_ep
- num_epochs = num_samples / dataset.num_samples
+ num_epochs = num_samples / dataset.num_frames
log_items = [
f"step:{format_big_number(step)}",
# number of samples seen during training
@@ -200,17 +220,17 @@ def log_train_info(logger: Logger, info, step, cfg, dataset, is_online):
logger.log_dict(info, step, mode="train")
-def log_eval_info(logger, info, step, cfg, dataset, is_online):
+def log_eval_info(logger, info, step, cfg, dataset, is_online, accelerator: Callable = None):
eval_s = info["eval_s"]
avg_sum_reward = info["avg_sum_reward"]
pc_success = info["pc_success"]
# A sample is an (observation,action) pair, where observation and action
# can be on multiple timestamps. In a batch, we have `batch_size`` number of samples.
- num_samples = (step + 1) * cfg.training.batch_size
- avg_samples_per_ep = dataset.num_samples / dataset.num_episodes
+ num_samples = (step + 1) * cfg.training.batch_size * (accelerator.num_processes if accelerator else 1)
+ avg_samples_per_ep = dataset.num_frames / dataset.num_episodes
num_episodes = num_samples / avg_samples_per_ep
- num_epochs = num_samples / dataset.num_samples
+ num_epochs = num_samples / dataset.num_frames
log_items = [
f"step:{format_big_number(step)}",
# number of samples seen during training
@@ -234,13 +254,22 @@ def log_eval_info(logger, info, step, cfg, dataset, is_online):
logger.log_dict(info, step, mode="eval")
-def train(cfg: DictConfig, out_dir: str | None = None, job_name: str | None = None):
+def train(
+ cfg: DictConfig, out_dir: str | None = None, job_name: str | None = None, accelerator: Callable = None
+):
if out_dir is None:
raise NotImplementedError()
if job_name is None:
raise NotImplementedError()
- init_logging()
+ init_logging(accelerator)
+ if accelerator:
+ assert cfg.training.online_steps == 0, "Online training with accelerate is not implemented."
+ accelerator_config = get_accelerate_config(accelerator)
+ update_omegaconf(cfg, config_name="accelerator_config", config=accelerator_config)
+ logging.info(
+ f"Acccelerate is enabled, training will be launched with the following configuration:\n{pformat(accelerator_config)}"
+ )
logging.info(pformat(OmegaConf.to_container(cfg)))
if cfg.training.online_steps > 0 and isinstance(cfg.dataset_repo_id, ListConfig):
@@ -299,12 +328,15 @@ def train(cfg: DictConfig, out_dir: str | None = None, job_name: str | None = No
)
# log metrics to terminal and wandb
+ if accelerator and not accelerator.is_main_process:
+ # Disable logging on non-main processes.
+ cfg.wandb.enable = False
logger = Logger(cfg, out_dir, wandb_job_name=job_name)
- set_global_seed(cfg.seed)
+ set_global_seed(cfg.seed, accelerator=accelerator)
# Check device is available
- device = get_safe_torch_device(cfg.device, log=True)
+ device = get_safe_torch_device(cfg.device, log=True, accelerator=accelerator)
torch.backends.cudnn.benchmark = True
torch.backends.cuda.matmul.allow_tf32 = True
@@ -323,15 +355,16 @@ def train(cfg: DictConfig, out_dir: str | None = None, job_name: str | None = No
eval_env = None
if cfg.training.eval_freq > 0:
logging.info("make_env")
- eval_env = make_env(cfg)
+ eval_env = make_env(cfg, out_dir=out_dir)
logging.info("make_policy")
policy = make_policy(
hydra_cfg=cfg,
- dataset_stats=offline_dataset.stats if not cfg.resume else None,
+ dataset_stats=offline_dataset.meta.stats if not cfg.resume else None,
pretrained_policy_name_or_path=str(logger.last_pretrained_model_dir) if cfg.resume else None,
)
assert isinstance(policy, nn.Module)
+ policy.to(device)
# Create optimizer and scheduler
# Temporary hack to move optimizer out of policy
optimizer, lr_scheduler = make_optimizer_and_scheduler(cfg, policy)
@@ -349,7 +382,7 @@ def train(cfg: DictConfig, out_dir: str | None = None, job_name: str | None = No
logging.info(f"{cfg.env.task=}")
logging.info(f"{cfg.training.offline_steps=} ({format_big_number(cfg.training.offline_steps)})")
logging.info(f"{cfg.training.online_steps=}")
- logging.info(f"{offline_dataset.num_samples=} ({format_big_number(offline_dataset.num_samples)})")
+ logging.info(f"{offline_dataset.num_frames=} ({format_big_number(offline_dataset.num_frames)})")
logging.info(f"{offline_dataset.num_episodes=}")
logging.info(f"{num_learnable_params=} ({format_big_number(num_learnable_params)})")
logging.info(f"{num_total_params=} ({format_big_number(num_total_params)})")
@@ -361,17 +394,32 @@ def evaluate_and_checkpoint_if_needed(step, is_online):
if cfg.training.eval_freq > 0 and step % cfg.training.eval_freq == 0:
logging.info(f"Eval policy at step {step}")
- with torch.no_grad(), torch.autocast(device_type=device.type) if cfg.use_amp else nullcontext():
+ logging.info(f"max_episodes_rendered {cfg.eval.max_episodes_rendered}")
+ with (
+ torch.no_grad(),
+ torch.autocast(device_type=device.type) if cfg.use_amp and not accelerator else nullcontext(),
+ ):
assert eval_env is not None
+ if accelerator:
+ accelerator.wait_for_everyone()
+ logging.info("eval_policy")
eval_info = eval_policy(
eval_env,
- policy,
+ policy if not accelerator else accelerator.unwrap_model(policy, keep_fp32_wrapper=True),
cfg.eval.n_episodes,
videos_dir=Path(out_dir) / "eval" / f"videos_step_{step_identifier}",
- max_episodes_rendered=4,
+ max_episodes_rendered=cfg.eval.max_episodes_rendered,
start_seed=cfg.seed,
)
- log_eval_info(logger, eval_info["aggregated"], step, cfg, offline_dataset, is_online=is_online)
+ log_eval_info(
+ logger,
+ eval_info["aggregated"],
+ step,
+ cfg,
+ offline_dataset,
+ is_online=is_online,
+ accelerator=accelerator,
+ )
if cfg.wandb.enable:
logger.log_video(eval_info["video_paths"][0], step, mode="eval")
logging.info("Resume training")
@@ -379,13 +427,14 @@ def evaluate_and_checkpoint_if_needed(step, is_online):
if cfg.training.save_checkpoint and (
step % cfg.training.save_freq == 0
or step == cfg.training.offline_steps + cfg.training.online_steps
+ and (not accelerator or accelerator.is_main_process)
):
logging.info(f"Checkpoint policy after step {step}")
# Note: Save with step as the identifier, and format it to have at least 6 digits but more if
# needed (choose 6 as a minimum for consistency without being overkill).
- logger.save_checkpont(
+ logger.save_checkpoint(
step,
- policy,
+ policy if not accelerator else accelerator.unwrap_model(policy),
optimizer,
lr_scheduler,
identifier=step_identifier,
@@ -412,6 +461,10 @@ def evaluate_and_checkpoint_if_needed(step, is_online):
pin_memory=device.type != "cpu",
drop_last=False,
)
+ if accelerator:
+ policy, optimizer, dataloader, lr_scheduler = accelerator.prepare(
+ policy, optimizer, dataloader, lr_scheduler
+ )
dl_iter = cycle(dataloader)
policy.train()
@@ -435,12 +488,15 @@ def evaluate_and_checkpoint_if_needed(step, is_online):
grad_scaler=grad_scaler,
lr_scheduler=lr_scheduler,
use_amp=cfg.use_amp,
+ accelerator=accelerator,
)
train_info["dataloading_s"] = dataloading_s
- if step % cfg.training.log_freq == 0:
- log_train_info(logger, train_info, step, cfg, offline_dataset, is_online=False)
+ if (step % cfg.training.log_freq == 0) and (not accelerator or accelerator.is_main_process):
+ log_train_info(
+ logger, train_info, step, cfg, offline_dataset, is_online=False, accelerator=accelerator
+ )
# Note: evaluate_and_checkpoint_if_needed happens **after** the `step`th training update has completed,
# so we pass in step + 1.
@@ -573,7 +629,7 @@ def sample_trajectory_and_update_buffer():
online_drop_n_last_frames=cfg.training.get("drop_n_last_frames", 0) + 1,
online_sampling_ratio=cfg.training.online_sampling_ratio,
)
- sampler.num_samples = len(concat_dataset)
+ sampler.num_frames = len(concat_dataset)
update_online_buffer_s = time.perf_counter() - start_update_buffer_time
@@ -613,6 +669,7 @@ def sample_trajectory_and_update_buffer():
lr_scheduler=lr_scheduler,
use_amp=cfg.use_amp,
lock=lock,
+ accelerator=accelerator,
)
train_info["dataloading_s"] = dataloading_s
@@ -649,11 +706,24 @@ def sample_trajectory_and_update_buffer():
@hydra.main(version_base="1.2", config_name="default", config_path="../configs")
def train_cli(cfg: dict):
- train(
- cfg,
- out_dir=hydra.core.hydra_config.HydraConfig.get().run.dir,
- job_name=hydra.core.hydra_config.HydraConfig.get().job.name,
- )
+ if is_launched_with_accelerate():
+ import accelerate
+
+ # We set step_scheduler_with_optimizer False to prevent accelerate from
+ # adjusting the lr_scheduler steps based on the num_processes
+ accelerator = accelerate.Accelerator(step_scheduler_with_optimizer=False)
+ train(
+ cfg,
+ out_dir=hydra.core.hydra_config.HydraConfig.get().run.dir,
+ job_name=hydra.core.hydra_config.HydraConfig.get().job.name,
+ accelerator=accelerator,
+ )
+ else:
+ train(
+ cfg,
+ out_dir=hydra.core.hydra_config.HydraConfig.get().run.dir,
+ job_name=hydra.core.hydra_config.HydraConfig.get().job.name,
+ )
def train_notebook(out_dir=None, job_name=None, config_name="default", config_path="../configs"):
diff --git a/lerobot/scripts/visualize_dataset.py b/lerobot/scripts/visualize_dataset.py
index 6cff5752a..cdd5ce605 100644
--- a/lerobot/scripts/visualize_dataset.py
+++ b/lerobot/scripts/visualize_dataset.py
@@ -100,7 +100,7 @@ def to_hwc_uint8_numpy(chw_float32_torch: torch.Tensor) -> np.ndarray:
def visualize_dataset(
- repo_id: str,
+ dataset: LeRobotDataset,
episode_index: int,
batch_size: int = 32,
num_workers: int = 0,
@@ -108,7 +108,6 @@ def visualize_dataset(
web_port: int = 9090,
ws_port: int = 9087,
save: bool = False,
- root: Path | None = None,
output_dir: Path | None = None,
) -> Path | None:
if save:
@@ -116,8 +115,7 @@ def visualize_dataset(
output_dir is not None
), "Set an output directory where to write .rrd files with `--output-dir path/to/directory`."
- logging.info("Loading dataset")
- dataset = LeRobotDataset(repo_id, root=root)
+ repo_id = dataset.repo_id
logging.info("Loading dataloader")
episode_sampler = EpisodeSampler(dataset, episode_index)
@@ -153,7 +151,7 @@ def visualize_dataset(
rr.set_time_seconds("timestamp", batch["timestamp"][i].item())
# display each camera image
- for key in dataset.camera_keys:
+ for key in dataset.meta.camera_keys:
# TODO(rcadene): add `.compress()`? is it lossless?
rr.log(key, rr.Image(to_hwc_uint8_numpy(batch[key][i])))
@@ -209,11 +207,17 @@ def main():
required=True,
help="Episode to visualize.",
)
+ parser.add_argument(
+ "--local-files-only",
+ type=int,
+ default=0,
+ help="Use local files only. By default, this script will try to fetch the dataset from the hub if it exists.",
+ )
parser.add_argument(
"--root",
type=Path,
default=None,
- help="Root directory for a dataset stored locally (e.g. `--root data`). By default, the dataset will be loaded from hugging face cache folder, or downloaded from the hub if available.",
+ help="Root directory for the dataset stored locally (e.g. `--root data`). By default, the dataset will be loaded from hugging face cache folder, or downloaded from the hub if available.",
)
parser.add_argument(
"--output-dir",
@@ -268,7 +272,15 @@ def main():
)
args = parser.parse_args()
- visualize_dataset(**vars(args))
+ kwargs = vars(args)
+ repo_id = kwargs.pop("repo_id")
+ root = kwargs.pop("root")
+ local_files_only = kwargs.pop("local_files_only")
+
+ logging.info("Loading dataset")
+ dataset = LeRobotDataset(repo_id, root=root, local_files_only=local_files_only)
+
+ visualize_dataset(dataset, **vars(args))
if __name__ == "__main__":
diff --git a/lerobot/scripts/visualize_dataset_html.py b/lerobot/scripts/visualize_dataset_html.py
index c035e5626..2c81fbfc5 100644
--- a/lerobot/scripts/visualize_dataset_html.py
+++ b/lerobot/scripts/visualize_dataset_html.py
@@ -93,18 +93,17 @@ def index():
def show_episode(dataset_namespace, dataset_name, episode_id):
dataset_info = {
"repo_id": dataset.repo_id,
- "num_samples": dataset.num_samples,
+ "num_samples": dataset.num_frames,
"num_episodes": dataset.num_episodes,
"fps": dataset.fps,
}
- video_paths = get_episode_video_paths(dataset, episode_id)
- language_instruction = get_episode_language_instruction(dataset, episode_id)
+ video_paths = [dataset.meta.get_video_file_path(episode_id, key) for key in dataset.meta.video_keys]
+ tasks = dataset.meta.episodes[episode_id]["tasks"]
videos_info = [
- {"url": url_for("static", filename=video_path), "filename": Path(video_path).name}
+ {"url": url_for("static", filename=video_path), "filename": video_path.name}
for video_path in video_paths
]
- if language_instruction:
- videos_info[0]["language_instruction"] = language_instruction
+ videos_info[0]["language_instruction"] = tasks
ep_csv_url = url_for("static", filename=get_ep_csv_fname(episode_id))
return render_template(
@@ -131,16 +130,16 @@ def write_episode_data_csv(output_dir, file_name, episode_index, dataset):
from_idx = dataset.episode_data_index["from"][episode_index]
to_idx = dataset.episode_data_index["to"][episode_index]
- has_state = "observation.state" in dataset.hf_dataset.features
- has_action = "action" in dataset.hf_dataset.features
+ has_state = "observation.state" in dataset.features
+ has_action = "action" in dataset.features
# init header of csv with state and action names
header = ["timestamp"]
if has_state:
- dim_state = len(dataset.hf_dataset["observation.state"][0])
+ dim_state = dataset.meta.shapes["observation.state"][0]
header += [f"state_{i}" for i in range(dim_state)]
if has_action:
- dim_action = len(dataset.hf_dataset["action"][0])
+ dim_action = dataset.meta.shapes["action"][0]
header += [f"action_{i}" for i in range(dim_action)]
columns = ["timestamp"]
@@ -172,27 +171,12 @@ def get_episode_video_paths(dataset: LeRobotDataset, ep_index: int) -> list[str]
first_frame_idx = dataset.episode_data_index["from"][ep_index].item()
return [
dataset.hf_dataset.select_columns(key)[first_frame_idx][key]["path"]
- for key in dataset.video_frame_keys
+ for key in dataset.meta.video_keys
]
-def get_episode_language_instruction(dataset: LeRobotDataset, ep_index: int) -> list[str]:
- # check if the dataset has language instructions
- if "language_instruction" not in dataset.hf_dataset.features:
- return None
-
- # get first frame index
- first_frame_idx = dataset.episode_data_index["from"][ep_index].item()
-
- language_instruction = dataset.hf_dataset[first_frame_idx]["language_instruction"]
- # TODO (michel-aractingi) hack to get the sentence, some strings in openx are badly stored
- # with the tf.tensor appearing in the string
- return language_instruction.removeprefix("tf.Tensor(b'").removesuffix("', shape=(), dtype=string)")
-
-
def visualize_dataset_html(
- repo_id: str,
- root: Path | None = None,
+ dataset: LeRobotDataset,
episodes: list[int] = None,
output_dir: Path | None = None,
serve: bool = True,
@@ -202,13 +186,11 @@ def visualize_dataset_html(
) -> Path | None:
init_logging()
- dataset = LeRobotDataset(repo_id, root=root)
-
- if not dataset.video:
- raise NotImplementedError(f"Image datasets ({dataset.video=}) are currently not supported.")
+ if len(dataset.meta.image_keys) > 0:
+ raise NotImplementedError(f"Image keys ({dataset.meta.image_keys=}) are currently not supported.")
if output_dir is None:
- output_dir = f"outputs/visualize_dataset_html/{repo_id}"
+ output_dir = f"outputs/visualize_dataset_html/{dataset.repo_id}"
output_dir = Path(output_dir)
if output_dir.exists():
@@ -225,7 +207,7 @@ def visualize_dataset_html(
static_dir.mkdir(parents=True, exist_ok=True)
ln_videos_dir = static_dir / "videos"
if not ln_videos_dir.exists():
- ln_videos_dir.symlink_to(dataset.videos_dir.resolve())
+ ln_videos_dir.symlink_to((dataset.root / "videos").resolve())
template_dir = Path(__file__).resolve().parent.parent / "templates"
@@ -252,6 +234,12 @@ def main():
required=True,
help="Name of hugging face repositery containing a LeRobotDataset dataset (e.g. `lerobot/pusht` for https://huggingface.co/datasets/lerobot/pusht).",
)
+ parser.add_argument(
+ "--local-files-only",
+ type=int,
+ default=0,
+ help="Use local files only. By default, this script will try to fetch the dataset from the hub if it exists.",
+ )
parser.add_argument(
"--root",
type=Path,
@@ -297,7 +285,13 @@ def main():
)
args = parser.parse_args()
- visualize_dataset_html(**vars(args))
+ kwargs = vars(args)
+ repo_id = kwargs.pop("repo_id")
+ root = kwargs.pop("root")
+ local_files_only = kwargs.pop("local_files_only")
+
+ dataset = LeRobotDataset(repo_id, root=root, local_files_only=local_files_only)
+ visualize_dataset_html(dataset, **kwargs)
if __name__ == "__main__":
diff --git a/lerobot/scripts/visualize_image_transforms.py b/lerobot/scripts/visualize_image_transforms.py
index e7cd35827..f9fb5c08a 100644
--- a/lerobot/scripts/visualize_image_transforms.py
+++ b/lerobot/scripts/visualize_image_transforms.py
@@ -157,7 +157,7 @@ def visualize_transforms(cfg, output_dir: Path, n_examples: int = 5):
output_dir.mkdir(parents=True, exist_ok=True)
# Get 1st frame from 1st camera of 1st episode
- original_frame = dataset[0][dataset.camera_keys[0]]
+ original_frame = dataset[0][dataset.meta.camera_keys[0]]
to_pil(original_frame).save(output_dir / "original_frame.png", quality=100)
print("\nOriginal frame saved to:")
print(f" {output_dir / 'original_frame.png'}.")
diff --git a/lerobot/templates/visualize_dataset_template.html b/lerobot/templates/visualize_dataset_template.html
index 4f0bd343e..0fa1e713e 100644
--- a/lerobot/templates/visualize_dataset_template.html
+++ b/lerobot/templates/visualize_dataset_template.html
@@ -35,7 +35,7 @@ {{ dataset_info.repo_id }}
-
- Number of samples/frames: {{ dataset_info.num_samples }}
+ Number of samples/frames: {{ dataset_info.num_frames }}
-
Number of episodes: {{ dataset_info.num_episodes }}
@@ -250,7 +250,7 @@
if(!canPlayVideos){
this.videoCodecError = true;
}
-
+
// process CSV data
this.videos = document.querySelectorAll('video');
this.video = this.videos[0];
diff --git a/media/gym/aloha_act.gif b/media/gym/aloha_act.gif
new file mode 100644
index 000000000..0285a3dd1
Binary files /dev/null and b/media/gym/aloha_act.gif differ
diff --git a/media/gym/pusht_diffusion.gif b/media/gym/pusht_diffusion.gif
new file mode 100644
index 000000000..2c0129048
Binary files /dev/null and b/media/gym/pusht_diffusion.gif differ
diff --git a/media/gym/simxarm_tdmpc.gif b/media/gym/simxarm_tdmpc.gif
new file mode 100644
index 000000000..fc7a19b14
Binary files /dev/null and b/media/gym/simxarm_tdmpc.gif differ
diff --git a/media/moss/follower_initial.webp b/media/moss/follower_initial.webp
new file mode 100644
index 000000000..e7ded16bd
Binary files /dev/null and b/media/moss/follower_initial.webp differ
diff --git a/media/moss/follower_rest.webp b/media/moss/follower_rest.webp
new file mode 100644
index 000000000..f0dba18bd
Binary files /dev/null and b/media/moss/follower_rest.webp differ
diff --git a/media/moss/follower_rotated.webp b/media/moss/follower_rotated.webp
new file mode 100644
index 000000000..23d5aa9c1
Binary files /dev/null and b/media/moss/follower_rotated.webp differ
diff --git a/media/moss/follower_zero.webp b/media/moss/follower_zero.webp
new file mode 100644
index 000000000..10ef83704
Binary files /dev/null and b/media/moss/follower_zero.webp differ
diff --git a/media/moss/leader_rest.webp b/media/moss/leader_rest.webp
new file mode 100644
index 000000000..cd77d294d
Binary files /dev/null and b/media/moss/leader_rest.webp differ
diff --git a/media/moss/leader_rotated.webp b/media/moss/leader_rotated.webp
new file mode 100644
index 000000000..c3426650a
Binary files /dev/null and b/media/moss/leader_rotated.webp differ
diff --git a/media/moss/leader_zero.webp b/media/moss/leader_zero.webp
new file mode 100644
index 000000000..d79ed3736
Binary files /dev/null and b/media/moss/leader_zero.webp differ
diff --git a/media/so100/follower_initial.webp b/media/so100/follower_initial.webp
new file mode 100644
index 000000000..7f93a773a
Binary files /dev/null and b/media/so100/follower_initial.webp differ
diff --git a/media/so100/follower_rest.webp b/media/so100/follower_rest.webp
new file mode 100644
index 000000000..971fbc684
Binary files /dev/null and b/media/so100/follower_rest.webp differ
diff --git a/media/so100/follower_rotated.webp b/media/so100/follower_rotated.webp
new file mode 100644
index 000000000..b13d7d7d5
Binary files /dev/null and b/media/so100/follower_rotated.webp differ
diff --git a/media/so100/follower_zero.webp b/media/so100/follower_zero.webp
new file mode 100644
index 000000000..411a55545
Binary files /dev/null and b/media/so100/follower_zero.webp differ
diff --git a/media/so100/leader_follower.webp b/media/so100/leader_follower.webp
new file mode 100644
index 000000000..83cf4b231
Binary files /dev/null and b/media/so100/leader_follower.webp differ
diff --git a/media/so100/leader_rest.webp b/media/so100/leader_rest.webp
new file mode 100644
index 000000000..351667778
Binary files /dev/null and b/media/so100/leader_rest.webp differ
diff --git a/media/so100/leader_rotated.webp b/media/so100/leader_rotated.webp
new file mode 100644
index 000000000..1f770f6ce
Binary files /dev/null and b/media/so100/leader_rotated.webp differ
diff --git a/media/so100/leader_zero.webp b/media/so100/leader_zero.webp
new file mode 100644
index 000000000..5f8c235f9
Binary files /dev/null and b/media/so100/leader_zero.webp differ
diff --git a/poetry.lock b/poetry.lock
index 40bf29eff..8799e67ca 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
[[package]]
name = "absl-py"
@@ -11,101 +11,141 @@ files = [
{file = "absl_py-2.1.0-py3-none-any.whl", hash = "sha256:526a04eadab8b4ee719ce68f204172ead1027549089702d99b9059f129ff1308"},
]
+[[package]]
+name = "aiohappyeyeballs"
+version = "2.4.3"
+description = "Happy Eyeballs for asyncio"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "aiohappyeyeballs-2.4.3-py3-none-any.whl", hash = "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572"},
+ {file = "aiohappyeyeballs-2.4.3.tar.gz", hash = "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586"},
+]
+
[[package]]
name = "aiohttp"
-version = "3.9.5"
+version = "3.10.10"
description = "Async http client/server framework (asyncio)"
optional = false
python-versions = ">=3.8"
files = [
- {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"},
- {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"},
- {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"},
- {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"},
- {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"},
- {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"},
- {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"},
- {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"},
- {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"},
- {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"},
- {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"},
- {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"},
- {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"},
- {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"},
- {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"},
- {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"},
- {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"},
- {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"},
- {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"},
- {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"},
- {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"},
- {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"},
- {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"},
- {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"},
- {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"},
- {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"},
- {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"},
- {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"},
- {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"},
- {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"},
- {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"},
- {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"},
- {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"},
- {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"},
- {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"},
- {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"},
- {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"},
- {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"},
- {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"},
- {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"},
- {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"},
- {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"},
- {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"},
- {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"},
- {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"},
- {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"},
- {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"},
- {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"},
- {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"},
- {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"},
- {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"},
- {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"},
- {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"},
- {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"},
- {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"},
- {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"},
- {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"},
- {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"},
- {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"},
- {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"},
- {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"},
- {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"},
- {file = "aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"},
- {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"},
- {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"},
- {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"},
- {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"},
- {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"},
- {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"},
- {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"},
- {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"},
- {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"},
- {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"},
- {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"},
- {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"},
- {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"},
+ {file = "aiohttp-3.10.10-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:be7443669ae9c016b71f402e43208e13ddf00912f47f623ee5994e12fc7d4b3f"},
+ {file = "aiohttp-3.10.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b06b7843929e41a94ea09eb1ce3927865387e3e23ebe108e0d0d09b08d25be9"},
+ {file = "aiohttp-3.10.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:333cf6cf8e65f6a1e06e9eb3e643a0c515bb850d470902274239fea02033e9a8"},
+ {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:274cfa632350225ce3fdeb318c23b4a10ec25c0e2c880eff951a3842cf358ac1"},
+ {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9e5e4a85bdb56d224f412d9c98ae4cbd032cc4f3161818f692cd81766eee65a"},
+ {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b606353da03edcc71130b52388d25f9a30a126e04caef1fd637e31683033abd"},
+ {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab5a5a0c7a7991d90446a198689c0535be89bbd6b410a1f9a66688f0880ec026"},
+ {file = "aiohttp-3.10.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:578a4b875af3e0daaf1ac6fa983d93e0bbfec3ead753b6d6f33d467100cdc67b"},
+ {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8105fd8a890df77b76dd3054cddf01a879fc13e8af576805d667e0fa0224c35d"},
+ {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3bcd391d083f636c06a68715e69467963d1f9600f85ef556ea82e9ef25f043f7"},
+ {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fbc6264158392bad9df19537e872d476f7c57adf718944cc1e4495cbabf38e2a"},
+ {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e48d5021a84d341bcaf95c8460b152cfbad770d28e5fe14a768988c461b821bc"},
+ {file = "aiohttp-3.10.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2609e9ab08474702cc67b7702dbb8a80e392c54613ebe80db7e8dbdb79837c68"},
+ {file = "aiohttp-3.10.10-cp310-cp310-win32.whl", hash = "sha256:84afcdea18eda514c25bc68b9af2a2b1adea7c08899175a51fe7c4fb6d551257"},
+ {file = "aiohttp-3.10.10-cp310-cp310-win_amd64.whl", hash = "sha256:9c72109213eb9d3874f7ac8c0c5fa90e072d678e117d9061c06e30c85b4cf0e6"},
+ {file = "aiohttp-3.10.10-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c30a0eafc89d28e7f959281b58198a9fa5e99405f716c0289b7892ca345fe45f"},
+ {file = "aiohttp-3.10.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:258c5dd01afc10015866114e210fb7365f0d02d9d059c3c3415382ab633fcbcb"},
+ {file = "aiohttp-3.10.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:15ecd889a709b0080f02721255b3f80bb261c2293d3c748151274dfea93ac871"},
+ {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3935f82f6f4a3820270842e90456ebad3af15810cf65932bd24da4463bc0a4c"},
+ {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:413251f6fcf552a33c981c4709a6bba37b12710982fec8e558ae944bfb2abd38"},
+ {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1720b4f14c78a3089562b8875b53e36b51c97c51adc53325a69b79b4b48ebcb"},
+ {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:679abe5d3858b33c2cf74faec299fda60ea9de62916e8b67e625d65bf069a3b7"},
+ {file = "aiohttp-3.10.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79019094f87c9fb44f8d769e41dbb664d6e8fcfd62f665ccce36762deaa0e911"},
+ {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2fb38c2ed905a2582948e2de560675e9dfbee94c6d5ccdb1301c6d0a5bf092"},
+ {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a3f00003de6eba42d6e94fabb4125600d6e484846dbf90ea8e48a800430cc142"},
+ {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:1bbb122c557a16fafc10354b9d99ebf2f2808a660d78202f10ba9d50786384b9"},
+ {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:30ca7c3b94708a9d7ae76ff281b2f47d8eaf2579cd05971b5dc681db8caac6e1"},
+ {file = "aiohttp-3.10.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:df9270660711670e68803107d55c2b5949c2e0f2e4896da176e1ecfc068b974a"},
+ {file = "aiohttp-3.10.10-cp311-cp311-win32.whl", hash = "sha256:aafc8ee9b742ce75044ae9a4d3e60e3d918d15a4c2e08a6c3c3e38fa59b92d94"},
+ {file = "aiohttp-3.10.10-cp311-cp311-win_amd64.whl", hash = "sha256:362f641f9071e5f3ee6f8e7d37d5ed0d95aae656adf4ef578313ee585b585959"},
+ {file = "aiohttp-3.10.10-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9294bbb581f92770e6ed5c19559e1e99255e4ca604a22c5c6397b2f9dd3ee42c"},
+ {file = "aiohttp-3.10.10-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a8fa23fe62c436ccf23ff930149c047f060c7126eae3ccea005f0483f27b2e28"},
+ {file = "aiohttp-3.10.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c6a5b8c7926ba5d8545c7dd22961a107526562da31a7a32fa2456baf040939f"},
+ {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:007ec22fbc573e5eb2fb7dec4198ef8f6bf2fe4ce20020798b2eb5d0abda6138"},
+ {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9627cc1a10c8c409b5822a92d57a77f383b554463d1884008e051c32ab1b3742"},
+ {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:50edbcad60d8f0e3eccc68da67f37268b5144ecc34d59f27a02f9611c1d4eec7"},
+ {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a45d85cf20b5e0d0aa5a8dca27cce8eddef3292bc29d72dcad1641f4ed50aa16"},
+ {file = "aiohttp-3.10.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b00807e2605f16e1e198f33a53ce3c4523114059b0c09c337209ae55e3823a8"},
+ {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f2d4324a98062be0525d16f768a03e0bbb3b9fe301ceee99611dc9a7953124e6"},
+ {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:438cd072f75bb6612f2aca29f8bd7cdf6e35e8f160bc312e49fbecab77c99e3a"},
+ {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:baa42524a82f75303f714108fea528ccacf0386af429b69fff141ffef1c534f9"},
+ {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a7d8d14fe962153fc681f6366bdec33d4356f98a3e3567782aac1b6e0e40109a"},
+ {file = "aiohttp-3.10.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c1277cd707c465cd09572a774559a3cc7c7a28802eb3a2a9472588f062097205"},
+ {file = "aiohttp-3.10.10-cp312-cp312-win32.whl", hash = "sha256:59bb3c54aa420521dc4ce3cc2c3fe2ad82adf7b09403fa1f48ae45c0cbde6628"},
+ {file = "aiohttp-3.10.10-cp312-cp312-win_amd64.whl", hash = "sha256:0e1b370d8007c4ae31ee6db7f9a2fe801a42b146cec80a86766e7ad5c4a259cf"},
+ {file = "aiohttp-3.10.10-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ad7593bb24b2ab09e65e8a1d385606f0f47c65b5a2ae6c551db67d6653e78c28"},
+ {file = "aiohttp-3.10.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1eb89d3d29adaf533588f209768a9c02e44e4baf832b08118749c5fad191781d"},
+ {file = "aiohttp-3.10.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3fe407bf93533a6fa82dece0e74dbcaaf5d684e5a51862887f9eaebe6372cd79"},
+ {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aed5155f819873d23520919e16703fc8925e509abbb1a1491b0087d1cd969e"},
+ {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4f05e9727ce409358baa615dbeb9b969db94324a79b5a5cea45d39bdb01d82e6"},
+ {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dffb610a30d643983aeb185ce134f97f290f8935f0abccdd32c77bed9388b42"},
+ {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa6658732517ddabe22c9036479eabce6036655ba87a0224c612e1ae6af2087e"},
+ {file = "aiohttp-3.10.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:741a46d58677d8c733175d7e5aa618d277cd9d880301a380fd296975a9cdd7bc"},
+ {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e00e3505cd80440f6c98c6d69269dcc2a119f86ad0a9fd70bccc59504bebd68a"},
+ {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ffe595f10566f8276b76dc3a11ae4bb7eba1aac8ddd75811736a15b0d5311414"},
+ {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:bdfcf6443637c148c4e1a20c48c566aa694fa5e288d34b20fcdc58507882fed3"},
+ {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d183cf9c797a5291e8301790ed6d053480ed94070637bfaad914dd38b0981f67"},
+ {file = "aiohttp-3.10.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:77abf6665ae54000b98b3c742bc6ea1d1fb31c394bcabf8b5d2c1ac3ebfe7f3b"},
+ {file = "aiohttp-3.10.10-cp313-cp313-win32.whl", hash = "sha256:4470c73c12cd9109db8277287d11f9dd98f77fc54155fc71a7738a83ffcc8ea8"},
+ {file = "aiohttp-3.10.10-cp313-cp313-win_amd64.whl", hash = "sha256:486f7aabfa292719a2753c016cc3a8f8172965cabb3ea2e7f7436c7f5a22a151"},
+ {file = "aiohttp-3.10.10-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:1b66ccafef7336a1e1f0e389901f60c1d920102315a56df85e49552308fc0486"},
+ {file = "aiohttp-3.10.10-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:acd48d5b80ee80f9432a165c0ac8cbf9253eaddb6113269a5e18699b33958dbb"},
+ {file = "aiohttp-3.10.10-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3455522392fb15ff549d92fbf4b73b559d5e43dc522588f7eb3e54c3f38beee7"},
+ {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45c3b868724137f713a38376fef8120c166d1eadd50da1855c112fe97954aed8"},
+ {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:da1dee8948d2137bb51fbb8a53cce6b1bcc86003c6b42565f008438b806cccd8"},
+ {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5ce2ce7c997e1971b7184ee37deb6ea9922ef5163c6ee5aa3c274b05f9e12fa"},
+ {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28529e08fde6f12eba8677f5a8608500ed33c086f974de68cc65ab218713a59d"},
+ {file = "aiohttp-3.10.10-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7db54c7914cc99d901d93a34704833568d86c20925b2762f9fa779f9cd2e70f"},
+ {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:03a42ac7895406220124c88911ebee31ba8b2d24c98507f4a8bf826b2937c7f2"},
+ {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:7e338c0523d024fad378b376a79faff37fafb3c001872a618cde1d322400a572"},
+ {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:038f514fe39e235e9fef6717fbf944057bfa24f9b3db9ee551a7ecf584b5b480"},
+ {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:64f6c17757251e2b8d885d728b6433d9d970573586a78b78ba8929b0f41d045a"},
+ {file = "aiohttp-3.10.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:93429602396f3383a797a2a70e5f1de5df8e35535d7806c9f91df06f297e109b"},
+ {file = "aiohttp-3.10.10-cp38-cp38-win32.whl", hash = "sha256:c823bc3971c44ab93e611ab1a46b1eafeae474c0c844aff4b7474287b75fe49c"},
+ {file = "aiohttp-3.10.10-cp38-cp38-win_amd64.whl", hash = "sha256:54ca74df1be3c7ca1cf7f4c971c79c2daf48d9aa65dea1a662ae18926f5bc8ce"},
+ {file = "aiohttp-3.10.10-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:01948b1d570f83ee7bbf5a60ea2375a89dfb09fd419170e7f5af029510033d24"},
+ {file = "aiohttp-3.10.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9fc1500fd2a952c5c8e3b29aaf7e3cc6e27e9cfc0a8819b3bce48cc1b849e4cc"},
+ {file = "aiohttp-3.10.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f614ab0c76397661b90b6851a030004dac502e48260ea10f2441abd2207fbcc7"},
+ {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00819de9e45d42584bed046314c40ea7e9aea95411b38971082cad449392b08c"},
+ {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05646ebe6b94cc93407b3bf34b9eb26c20722384d068eb7339de802154d61bc5"},
+ {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:998f3bd3cfc95e9424a6acd7840cbdd39e45bc09ef87533c006f94ac47296090"},
+ {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9010c31cd6fa59438da4e58a7f19e4753f7f264300cd152e7f90d4602449762"},
+ {file = "aiohttp-3.10.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ea7ffc6d6d6f8a11e6f40091a1040995cdff02cfc9ba4c2f30a516cb2633554"},
+ {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ef9c33cc5cbca35808f6c74be11eb7f5f6b14d2311be84a15b594bd3e58b5527"},
+ {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ce0cdc074d540265bfeb31336e678b4e37316849d13b308607efa527e981f5c2"},
+ {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:597a079284b7ee65ee102bc3a6ea226a37d2b96d0418cc9047490f231dc09fe8"},
+ {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:7789050d9e5d0c309c706953e5e8876e38662d57d45f936902e176d19f1c58ab"},
+ {file = "aiohttp-3.10.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e7f8b04d83483577fd9200461b057c9f14ced334dcb053090cea1da9c8321a91"},
+ {file = "aiohttp-3.10.10-cp39-cp39-win32.whl", hash = "sha256:c02a30b904282777d872266b87b20ed8cc0d1501855e27f831320f471d54d983"},
+ {file = "aiohttp-3.10.10-cp39-cp39-win_amd64.whl", hash = "sha256:edfe3341033a6b53a5c522c802deb2079eee5cbfbb0af032a55064bd65c73a23"},
+ {file = "aiohttp-3.10.10.tar.gz", hash = "sha256:0631dd7c9f0822cc61c88586ca76d5b5ada26538097d0f1df510b082bad3411a"},
]
[package.dependencies]
+aiohappyeyeballs = ">=2.3.0"
aiosignal = ">=1.1.2"
async-timeout = {version = ">=4.0,<5.0", markers = "python_version < \"3.11\""}
attrs = ">=17.3.0"
frozenlist = ">=1.1.1"
multidict = ">=4.5,<7.0"
-yarl = ">=1.0,<2.0"
+yarl = ">=1.12.0,<2.0"
[package.extras]
-speedups = ["Brotli", "aiodns", "brotlicffi"]
+speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"]
+
+[[package]]
+name = "aioserial"
+version = "1.3.1"
+description = "An asynchronous serial port library of Python"
+optional = true
+python-versions = ">=3.6,<4.0"
+files = [
+ {file = "aioserial-1.3.1.tar.gz", hash = "sha256:702bf03b0eb84b8ef2d8dac5cb925e1e685dce98f77b125569bc6fd2b3b58228"},
+]
+
+[package.dependencies]
+pyserial = "*"
[[package]]
name = "aiosignal"
@@ -131,6 +171,115 @@ files = [
{file = "antlr4-python3-runtime-4.9.3.tar.gz", hash = "sha256:f224469b4168294902bb1efa80a8bf7855f24c99aef99cbefc1bcd3cce77881b"},
]
+[[package]]
+name = "anyio"
+version = "4.6.0"
+description = "High level compatibility layer for multiple asynchronous event loop implementations"
+optional = true
+python-versions = ">=3.9"
+files = [
+ {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"},
+ {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"},
+]
+
+[package.dependencies]
+exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""}
+idna = ">=2.8"
+sniffio = ">=1.1"
+typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""}
+
+[package.extras]
+doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
+test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"]
+trio = ["trio (>=0.26.1)"]
+
+[[package]]
+name = "appnope"
+version = "0.1.4"
+description = "Disable App Nap on macOS >= 10.9"
+optional = true
+python-versions = ">=3.6"
+files = [
+ {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"},
+ {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"},
+]
+
+[[package]]
+name = "argon2-cffi"
+version = "23.1.0"
+description = "Argon2 for Python"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "argon2_cffi-23.1.0-py3-none-any.whl", hash = "sha256:c670642b78ba29641818ab2e68bd4e6a78ba53b7eff7b4c3815ae16abf91c7ea"},
+ {file = "argon2_cffi-23.1.0.tar.gz", hash = "sha256:879c3e79a2729ce768ebb7d36d4609e3a78a4ca2ec3a9f12286ca057e3d0db08"},
+]
+
+[package.dependencies]
+argon2-cffi-bindings = "*"
+
+[package.extras]
+dev = ["argon2-cffi[tests,typing]", "tox (>4)"]
+docs = ["furo", "myst-parser", "sphinx", "sphinx-copybutton", "sphinx-notfound-page"]
+tests = ["hypothesis", "pytest"]
+typing = ["mypy"]
+
+[[package]]
+name = "argon2-cffi-bindings"
+version = "21.2.0"
+description = "Low-level CFFI bindings for Argon2"
+optional = true
+python-versions = ">=3.6"
+files = [
+ {file = "argon2-cffi-bindings-21.2.0.tar.gz", hash = "sha256:bb89ceffa6c791807d1305ceb77dbfacc5aa499891d2c55661c6459651fc39e3"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ccb949252cb2ab3a08c02024acb77cfb179492d5701c7cbdbfd776124d4d2367"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9524464572e12979364b7d600abf96181d3541da11e23ddf565a32e70bd4dc0d"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b746dba803a79238e925d9046a63aa26bf86ab2a2fe74ce6b009a1c3f5c8f2ae"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:58ed19212051f49a523abb1dbe954337dc82d947fb6e5a0da60f7c8471a8476c"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd46088725ef7f58b5a1ef7ca06647ebaf0eb4baff7d1d0d177c6cc8744abd86"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_i686.whl", hash = "sha256:8cd69c07dd875537a824deec19f978e0f2078fdda07fd5c42ac29668dda5f40f"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f1152ac548bd5b8bcecfb0b0371f082037e47128653df2e8ba6e914d384f3c3e"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win32.whl", hash = "sha256:603ca0aba86b1349b147cab91ae970c63118a0f30444d4bc80355937c950c082"},
+ {file = "argon2_cffi_bindings-21.2.0-cp36-abi3-win_amd64.whl", hash = "sha256:b2ef1c30440dbbcba7a5dc3e319408b59676e2e039e2ae11a8775ecf482b192f"},
+ {file = "argon2_cffi_bindings-21.2.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e415e3f62c8d124ee16018e491a009937f8cf7ebf5eb430ffc5de21b900dad93"},
+ {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e385d1c39c520c08b53d63300c3ecc28622f076f4c2b0e6d7e796e9f6502194"},
+ {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3e3cc67fdb7d82c4718f19b4e7a87123caf8a93fde7e23cf66ac0337d3cb3f"},
+ {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a22ad9800121b71099d0fb0a65323810a15f2e292f2ba450810a7316e128ee5"},
+ {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9f8b450ed0547e3d473fdc8612083fd08dd2120d6ac8f73828df9b7d45bb351"},
+ {file = "argon2_cffi_bindings-21.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:93f9bf70084f97245ba10ee36575f0c3f1e7d7724d67d8e5b08e61787c320ed7"},
+ {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3b9ef65804859d335dc6b31582cad2c5166f0c3e7975f324d9ffaa34ee7e6583"},
+ {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4966ef5848d820776f5f562a7d45fdd70c2f330c961d0d745b784034bd9f48d"},
+ {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ef543a89dee4db46a1a6e206cd015360e5a75822f76df533845c3cbaf72670"},
+ {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed2937d286e2ad0cc79a7087d3c272832865f779430e0cc2b4f3718d3159b0cb"},
+ {file = "argon2_cffi_bindings-21.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5e00316dabdaea0b2dd82d141cc66889ced0cdcbfa599e8b471cf22c620c329a"},
+]
+
+[package.dependencies]
+cffi = ">=1.0.1"
+
+[package.extras]
+dev = ["cogapp", "pre-commit", "pytest", "wheel"]
+tests = ["pytest"]
+
+[[package]]
+name = "arrow"
+version = "1.3.0"
+description = "Better dates & times for Python"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"},
+ {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"},
+]
+
+[package.dependencies]
+python-dateutil = ">=2.7.0"
+types-python-dateutil = ">=2.8.10"
+
+[package.extras]
+doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"]
+test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"]
+
[[package]]
name = "asciitree"
version = "0.3.3"
@@ -141,6 +290,38 @@ files = [
{file = "asciitree-0.3.3.tar.gz", hash = "sha256:4aa4b9b649f85e3fcb343363d97564aa1fb62e249677f2e18a96765145cc0f6e"},
]
+[[package]]
+name = "asttokens"
+version = "2.4.1"
+description = "Annotate AST trees with source code positions"
+optional = true
+python-versions = "*"
+files = [
+ {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"},
+ {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"},
+]
+
+[package.dependencies]
+six = ">=1.12.0"
+
+[package.extras]
+astroid = ["astroid (>=1,<2)", "astroid (>=2,<4)"]
+test = ["astroid (>=1,<2)", "astroid (>=2,<4)", "pytest"]
+
+[[package]]
+name = "async-lru"
+version = "2.0.4"
+description = "Simple LRU cache for asyncio"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "async-lru-2.0.4.tar.gz", hash = "sha256:b8a59a5df60805ff63220b2a0c5b5393da5521b113cd5465a44eb037d81a5627"},
+ {file = "async_lru-2.0.4-py3-none-any.whl", hash = "sha256:ff02944ce3c288c5be660c42dbcca0742b32c3b279d6dceda655190240b99224"},
+]
+
+[package.dependencies]
+typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""}
+
[[package]]
name = "async-timeout"
version = "4.0.3"
@@ -154,22 +335,36 @@ files = [
[[package]]
name = "attrs"
-version = "23.2.0"
+version = "24.2.0"
description = "Classes Without Boilerplate"
optional = false
python-versions = ">=3.7"
files = [
- {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
- {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
+ {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"},
+ {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"},
]
[package.extras]
-cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
-dev = ["attrs[tests]", "pre-commit"]
-docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
-tests = ["attrs[tests-no-zope]", "zope-interface"]
-tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
-tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
+benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
+tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
+tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"]
+
+[[package]]
+name = "babel"
+version = "2.16.0"
+description = "Internationalization utilities"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"},
+ {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"},
+]
+
+[package.extras]
+dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
[[package]]
name = "beautifulsoup4"
@@ -192,6 +387,24 @@ charset-normalizer = ["charset-normalizer"]
html5lib = ["html5lib"]
lxml = ["lxml"]
+[[package]]
+name = "bleach"
+version = "6.1.0"
+description = "An easy safelist-based HTML-sanitizing tool."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "bleach-6.1.0-py3-none-any.whl", hash = "sha256:3225f354cfc436b9789c66c4ee030194bee0568fbf9cbdad3bc8b5c26c5f12b6"},
+ {file = "bleach-6.1.0.tar.gz", hash = "sha256:0a31f1837963c41d46bbf1331b8778e1308ea0791db03cc4e7357b97cf42a8fe"},
+]
+
+[package.dependencies]
+six = ">=1.9.0"
+webencodings = "*"
+
+[package.extras]
+css = ["tinycss2 (>=1.1.0,<1.3)"]
+
[[package]]
name = "blinker"
version = "1.8.2"
@@ -205,74 +418,89 @@ files = [
[[package]]
name = "certifi"
-version = "2024.7.4"
+version = "2024.8.30"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
- {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
- {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
+ {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"},
+ {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"},
]
[[package]]
name = "cffi"
-version = "1.16.0"
+version = "1.17.1"
description = "Foreign Function Interface for Python calling C code."
optional = false
python-versions = ">=3.8"
files = [
- {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"},
- {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"},
- {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"},
- {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"},
- {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"},
- {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"},
- {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"},
- {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"},
- {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"},
- {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"},
- {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"},
- {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"},
- {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"},
- {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"},
- {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"},
- {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"},
- {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"},
- {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"},
- {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"},
- {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"},
- {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"},
- {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"},
- {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"},
- {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"},
- {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"},
- {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"},
- {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"},
- {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"},
- {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"},
- {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"},
- {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"},
- {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"},
- {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"},
- {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"},
- {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"},
- {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"},
- {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"},
- {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"},
- {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"},
- {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"},
+ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
+ {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"},
+ {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"},
+ {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"},
+ {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"},
+ {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"},
+ {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"},
+ {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"},
+ {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"},
+ {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"},
+ {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"},
+ {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"},
+ {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"},
+ {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"},
+ {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"},
+ {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"},
+ {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"},
+ {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
+ {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
+ {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
+ {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"},
+ {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"},
+ {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"},
+ {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"},
+ {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"},
+ {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"},
+ {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"},
+ {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"},
+ {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"},
+ {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"},
+ {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"},
+ {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"},
+ {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"},
+ {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"},
+ {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"},
+ {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
+ {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
]
[package.dependencies]
@@ -291,101 +519,127 @@ files = [
[[package]]
name = "charset-normalizer"
-version = "3.3.2"
+version = "3.4.0"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7.0"
files = [
- {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
- {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
- {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
- {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
- {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
- {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
- {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
- {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"},
+ {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"},
+ {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"},
+ {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"},
+ {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"},
+ {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"},
+ {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"},
+ {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"},
+ {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"},
+ {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"},
+ {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"},
+ {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"},
+ {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"},
+ {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"},
+ {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"},
+ {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"},
+ {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"},
+ {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"},
+ {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"},
+ {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"},
+ {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"},
+ {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"},
+ {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"},
+ {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"},
+ {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"},
+ {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"},
+ {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"},
+ {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"},
+ {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"},
+ {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"},
+ {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"},
+ {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"},
+ {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"},
+ {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"},
+ {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"},
+ {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"},
+]
+
+[[package]]
+name = "chime"
+version = "0.7.0"
+description = "Python sound notifications made easy."
+optional = true
+python-versions = ">=3.6,<4.0"
+files = [
+ {file = "chime-0.7.0-py3-none-any.whl", hash = "sha256:9626f8151cb008b1e0ffb7de6d1834b7013ba5fc4c4e3c9ba6e29dc9bf5feac6"},
+ {file = "chime-0.7.0.tar.gz", hash = "sha256:ba4af8934ec8bd9a89a340b4433b2e500097b979823386432be7128e0b201f0d"},
]
[[package]]
@@ -413,30 +667,48 @@ files = [
{file = "cloudpickle-3.0.0.tar.gz", hash = "sha256:996d9a482c6fb4f33c1a35335cf8afd065d2a56e973270364840712d9131a882"},
]
+[[package]]
+name = "cma"
+version = "4.0.0"
+description = "CMA-ES, Covariance Matrix Adaptation Evolution Strategy for non-linear numerical optimization in Python"
+optional = true
+python-versions = "*"
+files = [
+ {file = "cma-4.0.0-py3-none-any.whl", hash = "sha256:97b86ba1ac9f1cbb189a06c4d4a78f591f0878e5dd3e55c95e88e622e78c1a10"},
+ {file = "cma-4.0.0.tar.gz", hash = "sha256:fd28ce56983bf2fca0e614189d60134ebb80bf604f070d1ea095ea4e856f13a5"},
+]
+
+[package.dependencies]
+numpy = "*"
+
+[package.extras]
+constrained-solution-tracking = ["moarchiving"]
+plotting = ["matplotlib"]
+
[[package]]
name = "cmake"
-version = "3.30.0"
+version = "3.30.4"
description = "CMake is an open-source, cross-platform family of tools designed to build, test and package software"
optional = false
python-versions = ">=3.7"
files = [
- {file = "cmake-3.30.0-py3-none-macosx_10_10_x86_64.macosx_11_0_universal2.macosx_11_0_arm64.whl", hash = "sha256:9caf5839d041f3276596abf564267f7bbaf4b36731ad1f574f3d4c04d7f8c26b"},
- {file = "cmake-3.30.0-py3-none-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2c19c50ee12fb1fddb636401b60f301e873b1f0bc726968509556450496c26fb"},
- {file = "cmake-3.30.0-py3-none-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cc343a5fd4b3013e313083fd3226f4599210560e4d72743faa98057e9f41ccea"},
- {file = "cmake-3.30.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbe32916158e6ca2f45f6e1dc4578a99f5c9ab6cfc7e4f812fae284d54c4749d"},
- {file = "cmake-3.30.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a981336efd0d97a02bab4aba90f989077516a42c2510a1ba216f1a5cc00656f"},
- {file = "cmake-3.30.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59b8491d54064bf734e709001b1f79b1356a4c6c016f78445d5c0516785d096b"},
- {file = "cmake-3.30.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968e00571f6c07f36b2226a8dbd63eeba4888bcc2f9f30b1dbd2673f75b98564"},
- {file = "cmake-3.30.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e123afb34f08e38e76cd3303d1cea166f15ec7acd48353b6fe9d1175b10b4553"},
- {file = "cmake-3.30.0-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:d7c6265b3d066b25eaf07fc69b8672c28f531b59403cbabb864219f84098b378"},
- {file = "cmake-3.30.0-py3-none-musllinux_1_1_i686.whl", hash = "sha256:a6960b4b9e91bbcd68fc1a0395306a0eab68981752e667d4dc1721d9ad895358"},
- {file = "cmake-3.30.0-py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:100da4b77c2133a426ec6bffc01efcbdd9c212665c0b9acaa20bcaf98dc75097"},
- {file = "cmake-3.30.0-py3-none-musllinux_1_1_s390x.whl", hash = "sha256:e6e3ab9d48d5bf5564840e8152bcfe41a9318b1fe95b1410f8cc1f15800ff2bf"},
- {file = "cmake-3.30.0-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:bfb761c3dc275034d251494503e643dc8f23d15e8e6284eca1b2bfbde4634851"},
- {file = "cmake-3.30.0-py3-none-win32.whl", hash = "sha256:23253f76f44f0f69cf18c8343e56184ea3ab51e837198db691fbdef1bf986455"},
- {file = "cmake-3.30.0-py3-none-win_amd64.whl", hash = "sha256:aa9b483ff53804566909ec7ef8c25eaf4226c224756d731cb3dd28d9be2dea46"},
- {file = "cmake-3.30.0-py3-none-win_arm64.whl", hash = "sha256:fc9aba5cc8a631cbbe7a6b4b6b1f981346e70af35900459b4ac6a1b18f489568"},
- {file = "cmake-3.30.0.tar.gz", hash = "sha256:b6b9b584ce226dfde4d419578a2ae542e72409655c0ea2c989d5f9bb688cf024"},
+ {file = "cmake-3.30.4-py3-none-macosx_10_10_universal2.whl", hash = "sha256:8a1a30125213c3d44b81a1af0085ad1dcd77abc61bcdf330556e83898428198a"},
+ {file = "cmake-3.30.4-py3-none-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9f69b3706ae93fa48762871bdc7cb759fbbbadb04452e5eab820537c35fabcb6"},
+ {file = "cmake-3.30.4-py3-none-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:969af8432a17168e5b88e2efba11e5e14b7ca38aa638975b7ce1b19044c5183f"},
+ {file = "cmake-3.30.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a5929e21af39a3adf4058aea54aa2197198e06315ebad541dda0baf20e2b32b"},
+ {file = "cmake-3.30.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9808d3744e57c6fd71d93e2ca95142d67578a13a8867f7e8b000f343799899f"},
+ {file = "cmake-3.30.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0a223c62cfeebcb7b90f715c16bb2e83ee37e8c3e676efde83b094d62c278ec2"},
+ {file = "cmake-3.30.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08466455fbac67287a4868819ae0e0ab16d60c02eb209ae5e6d70e0e35d0e601"},
+ {file = "cmake-3.30.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b8a4b0e638ddbabd16cad8b053b5a66733ddaf652dc3d46d55b3887314022fe"},
+ {file = "cmake-3.30.4-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:a8f3160cc2b362c0ba03d70300a36bca5a58e1f82c345f4f54a4da7f59b7b2b4"},
+ {file = "cmake-3.30.4-py3-none-musllinux_1_1_i686.whl", hash = "sha256:13bd1afa2e9988973f18c2425823081a044929e80685731601f093ff673d2db7"},
+ {file = "cmake-3.30.4-py3-none-musllinux_1_1_ppc64le.whl", hash = "sha256:d2ab1018a42e03cf2e843f9565bc2ff7465a1a66c1cbfaba30d494a5e26f763e"},
+ {file = "cmake-3.30.4-py3-none-musllinux_1_1_s390x.whl", hash = "sha256:2d6367a438c11f0863c9cdea843acd09514e94534ce0d115bc8f7905aaff243d"},
+ {file = "cmake-3.30.4-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:e4cc37735bdc7ba058abdddd3f94ac9dc32cae0f94ae68661565b39f64a9a22f"},
+ {file = "cmake-3.30.4-py3-none-win32.whl", hash = "sha256:a08e9a987be5da69941f4a26dd7614fcbb5039394821fbcce9716c20a1571c0c"},
+ {file = "cmake-3.30.4-py3-none-win_amd64.whl", hash = "sha256:2d128d0831924788c1e87d6ca9abe4594e2ccde718712b0fa2c8c3a99b0d1282"},
+ {file = "cmake-3.30.4-py3-none-win_arm64.whl", hash = "sha256:2825874fb84bd9d05c40b1a4347366d9949c9f6bac7a9ace97ac7faf9d573b8b"},
+ {file = "cmake-3.30.4.tar.gz", hash = "sha256:fedd88495e742a1316078c283c2b4c2eeac4c34eca3234401d28f09ee58a320f"},
]
[package.extras]
@@ -453,65 +725,191 @@ files = [
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
+[[package]]
+name = "comm"
+version = "0.2.2"
+description = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"},
+ {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"},
+]
+
+[package.dependencies]
+traitlets = ">=4"
+
+[package.extras]
+test = ["pytest"]
+
+[[package]]
+name = "configargparse"
+version = "1.7"
+description = "A drop-in replacement for argparse that allows options to also be set via config files and/or environment variables."
+optional = true
+python-versions = ">=3.5"
+files = [
+ {file = "ConfigArgParse-1.7-py3-none-any.whl", hash = "sha256:d249da6591465c6c26df64a9f73d2536e743be2f244eb3ebe61114af2f94f86b"},
+ {file = "ConfigArgParse-1.7.tar.gz", hash = "sha256:e7067471884de5478c58a511e529f0f9bd1c66bfef1dea90935438d6c23306d1"},
+]
+
+[package.extras]
+test = ["PyYAML", "mock", "pytest"]
+yaml = ["PyYAML"]
+
+[[package]]
+name = "contourpy"
+version = "1.3.0"
+description = "Python library for calculating contours of 2D quadrilateral grids"
+optional = true
+python-versions = ">=3.9"
+files = [
+ {file = "contourpy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:880ea32e5c774634f9fcd46504bf9f080a41ad855f4fef54f5380f5133d343c7"},
+ {file = "contourpy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:76c905ef940a4474a6289c71d53122a4f77766eef23c03cd57016ce19d0f7b42"},
+ {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92f8557cbb07415a4d6fa191f20fd9d2d9eb9c0b61d1b2f52a8926e43c6e9af7"},
+ {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36f965570cff02b874773c49bfe85562b47030805d7d8360748f3eca570f4cab"},
+ {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cacd81e2d4b6f89c9f8a5b69b86490152ff39afc58a95af002a398273e5ce589"},
+ {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69375194457ad0fad3a839b9e29aa0b0ed53bb54db1bfb6c3ae43d111c31ce41"},
+ {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a52040312b1a858b5e31ef28c2e865376a386c60c0e248370bbea2d3f3b760d"},
+ {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3faeb2998e4fcb256542e8a926d08da08977f7f5e62cf733f3c211c2a5586223"},
+ {file = "contourpy-1.3.0-cp310-cp310-win32.whl", hash = "sha256:36e0cff201bcb17a0a8ecc7f454fe078437fa6bda730e695a92f2d9932bd507f"},
+ {file = "contourpy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:87ddffef1dbe5e669b5c2440b643d3fdd8622a348fe1983fad7a0f0ccb1cd67b"},
+ {file = "contourpy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fa4c02abe6c446ba70d96ece336e621efa4aecae43eaa9b030ae5fb92b309ad"},
+ {file = "contourpy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:834e0cfe17ba12f79963861e0f908556b2cedd52e1f75e6578801febcc6a9f49"},
+ {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbc4c3217eee163fa3984fd1567632b48d6dfd29216da3ded3d7b844a8014a66"},
+ {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4865cd1d419e0c7a7bf6de1777b185eebdc51470800a9f42b9e9decf17762081"},
+ {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:303c252947ab4b14c08afeb52375b26781ccd6a5ccd81abcdfc1fafd14cf93c1"},
+ {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637f674226be46f6ba372fd29d9523dd977a291f66ab2a74fbeb5530bb3f445d"},
+ {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76a896b2f195b57db25d6b44e7e03f221d32fe318d03ede41f8b4d9ba1bff53c"},
+ {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e1fd23e9d01591bab45546c089ae89d926917a66dceb3abcf01f6105d927e2cb"},
+ {file = "contourpy-1.3.0-cp311-cp311-win32.whl", hash = "sha256:d402880b84df3bec6eab53cd0cf802cae6a2ef9537e70cf75e91618a3801c20c"},
+ {file = "contourpy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:6cb6cc968059db9c62cb35fbf70248f40994dfcd7aa10444bbf8b3faeb7c2d67"},
+ {file = "contourpy-1.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:570ef7cf892f0afbe5b2ee410c507ce12e15a5fa91017a0009f79f7d93a1268f"},
+ {file = "contourpy-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:da84c537cb8b97d153e9fb208c221c45605f73147bd4cadd23bdae915042aad6"},
+ {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0be4d8425bfa755e0fd76ee1e019636ccc7c29f77a7c86b4328a9eb6a26d0639"},
+ {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c0da700bf58f6e0b65312d0a5e695179a71d0163957fa381bb3c1f72972537c"},
+ {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb8b141bb00fa977d9122636b16aa67d37fd40a3d8b52dd837e536d64b9a4d06"},
+ {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3634b5385c6716c258d0419c46d05c8aa7dc8cb70326c9a4fb66b69ad2b52e09"},
+ {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0dce35502151b6bd35027ac39ba6e5a44be13a68f55735c3612c568cac3805fd"},
+ {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea348f053c645100612b333adc5983d87be69acdc6d77d3169c090d3b01dc35"},
+ {file = "contourpy-1.3.0-cp312-cp312-win32.whl", hash = "sha256:90f73a5116ad1ba7174341ef3ea5c3150ddf20b024b98fb0c3b29034752c8aeb"},
+ {file = "contourpy-1.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:b11b39aea6be6764f84360fce6c82211a9db32a7c7de8fa6dd5397cf1d079c3b"},
+ {file = "contourpy-1.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3e1c7fa44aaae40a2247e2e8e0627f4bea3dd257014764aa644f319a5f8600e3"},
+ {file = "contourpy-1.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:364174c2a76057feef647c802652f00953b575723062560498dc7930fc9b1cb7"},
+ {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32b238b3b3b649e09ce9aaf51f0c261d38644bdfa35cbaf7b263457850957a84"},
+ {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d51fca85f9f7ad0b65b4b9fe800406d0d77017d7270d31ec3fb1cc07358fdea0"},
+ {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:732896af21716b29ab3e988d4ce14bc5133733b85956316fb0c56355f398099b"},
+ {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d73f659398a0904e125280836ae6f88ba9b178b2fed6884f3b1f95b989d2c8da"},
+ {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c6c7c2408b7048082932cf4e641fa3b8ca848259212f51c8c59c45aa7ac18f14"},
+ {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f317576606de89da6b7e0861cf6061f6146ead3528acabff9236458a6ba467f8"},
+ {file = "contourpy-1.3.0-cp313-cp313-win32.whl", hash = "sha256:31cd3a85dbdf1fc002280c65caa7e2b5f65e4a973fcdf70dd2fdcb9868069294"},
+ {file = "contourpy-1.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4553c421929ec95fb07b3aaca0fae668b2eb5a5203d1217ca7c34c063c53d087"},
+ {file = "contourpy-1.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:345af746d7766821d05d72cb8f3845dfd08dd137101a2cb9b24de277d716def8"},
+ {file = "contourpy-1.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3bb3808858a9dc68f6f03d319acd5f1b8a337e6cdda197f02f4b8ff67ad2057b"},
+ {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:420d39daa61aab1221567b42eecb01112908b2cab7f1b4106a52caaec8d36973"},
+ {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d63ee447261e963af02642ffcb864e5a2ee4cbfd78080657a9880b8b1868e18"},
+ {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:167d6c890815e1dac9536dca00828b445d5d0df4d6a8c6adb4a7ec3166812fa8"},
+ {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:710a26b3dc80c0e4febf04555de66f5fd17e9cf7170a7b08000601a10570bda6"},
+ {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:75ee7cb1a14c617f34a51d11fa7524173e56551646828353c4af859c56b766e2"},
+ {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:33c92cdae89ec5135d036e7218e69b0bb2851206077251f04a6c4e0e21f03927"},
+ {file = "contourpy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a11077e395f67ffc2c44ec2418cfebed032cd6da3022a94fc227b6faf8e2acb8"},
+ {file = "contourpy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e8134301d7e204c88ed7ab50028ba06c683000040ede1d617298611f9dc6240c"},
+ {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12968fdfd5bb45ffdf6192a590bd8ddd3ba9e58360b29683c6bb71a7b41edca"},
+ {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd2a0fc506eccaaa7595b7e1418951f213cf8255be2600f1ea1b61e46a60c55f"},
+ {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfb5c62ce023dfc410d6059c936dcf96442ba40814aefbfa575425a3a7f19dc"},
+ {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68a32389b06b82c2fdd68276148d7b9275b5f5cf13e5417e4252f6d1a34f72a2"},
+ {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94e848a6b83da10898cbf1311a815f770acc9b6a3f2d646f330d57eb4e87592e"},
+ {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d78ab28a03c854a873787a0a42254a0ccb3cb133c672f645c9f9c8f3ae9d0800"},
+ {file = "contourpy-1.3.0-cp39-cp39-win32.whl", hash = "sha256:81cb5ed4952aae6014bc9d0421dec7c5835c9c8c31cdf51910b708f548cf58e5"},
+ {file = "contourpy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:14e262f67bd7e6eb6880bc564dcda30b15e351a594657e55b7eec94b6ef72843"},
+ {file = "contourpy-1.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe41b41505a5a33aeaed2a613dccaeaa74e0e3ead6dd6fd3a118fb471644fd6c"},
+ {file = "contourpy-1.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca7e17a65f72a5133bdbec9ecf22401c62bcf4821361ef7811faee695799779"},
+ {file = "contourpy-1.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ec4dc6bf570f5b22ed0d7efba0dfa9c5b9e0431aeea7581aa217542d9e809a4"},
+ {file = "contourpy-1.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:00ccd0dbaad6d804ab259820fa7cb0b8036bda0686ef844d24125d8287178ce0"},
+ {file = "contourpy-1.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca947601224119117f7c19c9cdf6b3ab54c5726ef1d906aa4a69dfb6dd58102"},
+ {file = "contourpy-1.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6ec93afeb848a0845a18989da3beca3eec2c0f852322efe21af1931147d12cb"},
+ {file = "contourpy-1.3.0.tar.gz", hash = "sha256:7ffa0db17717a8ffb127efd0c95a4362d996b892c2904db72428d5b52e1938a4"},
+]
+
+[package.dependencies]
+numpy = ">=1.23"
+
+[package.extras]
+bokeh = ["bokeh", "selenium"]
+docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"]
+mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.11.1)", "types-Pillow"]
+test = ["Pillow", "contourpy[test-no-images]", "matplotlib"]
+test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"]
+
[[package]]
name = "coverage"
-version = "7.6.0"
+version = "7.6.2"
description = "Code coverage measurement for Python"
optional = true
-python-versions = ">=3.8"
+python-versions = ">=3.9"
files = [
- {file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"},
- {file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"},
- {file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"},
- {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"},
- {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"},
- {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"},
- {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"},
- {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"},
- {file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"},
- {file = "coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"},
- {file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"},
- {file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"},
- {file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"},
- {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"},
- {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"},
- {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"},
- {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"},
- {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"},
- {file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"},
- {file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"},
- {file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"},
- {file = "coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"},
- {file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"},
- {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"},
- {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"},
- {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"},
- {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"},
- {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"},
- {file = "coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"},
- {file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"},
- {file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"},
- {file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"},
- {file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"},
- {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"},
- {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"},
- {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"},
- {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"},
- {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"},
- {file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = "sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"},
- {file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"},
- {file = "coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"},
- {file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"},
- {file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"},
- {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"},
- {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"},
- {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"},
- {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"},
- {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"},
- {file = "coverage-7.6.0-cp39-cp39-win32.whl", hash = "sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"},
- {file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"},
- {file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"},
- {file = "coverage-7.6.0.tar.gz", hash = "sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"},
+ {file = "coverage-7.6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c9df1950fb92d49970cce38100d7e7293c84ed3606eaa16ea0b6bc27175bb667"},
+ {file = "coverage-7.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:24500f4b0e03aab60ce575c85365beab64b44d4db837021e08339f61d1fbfe52"},
+ {file = "coverage-7.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a663b180b6669c400b4630a24cc776f23a992d38ce7ae72ede2a397ce6b0f170"},
+ {file = "coverage-7.6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfde025e2793a22efe8c21f807d276bd1d6a4bcc5ba6f19dbdfc4e7a12160909"},
+ {file = "coverage-7.6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:087932079c065d7b8ebadd3a0160656c55954144af6439886c8bcf78bbbcde7f"},
+ {file = "coverage-7.6.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9c6b0c1cafd96213a0327cf680acb39f70e452caf8e9a25aeb05316db9c07f89"},
+ {file = "coverage-7.6.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6e85830eed5b5263ffa0c62428e43cb844296f3b4461f09e4bdb0d44ec190bc2"},
+ {file = "coverage-7.6.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:62ab4231c01e156ece1b3a187c87173f31cbeee83a5e1f6dff17f288dca93345"},
+ {file = "coverage-7.6.2-cp310-cp310-win32.whl", hash = "sha256:7b80fbb0da3aebde102a37ef0138aeedff45997e22f8962e5f16ae1742852676"},
+ {file = "coverage-7.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:d20c3d1f31f14d6962a4e2f549c21d31e670b90f777ef4171be540fb7fb70f02"},
+ {file = "coverage-7.6.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bb21bac7783c1bf6f4bbe68b1e0ff0d20e7e7732cfb7995bc8d96e23aa90fc7b"},
+ {file = "coverage-7.6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a7b2e437fbd8fae5bc7716b9c7ff97aecc95f0b4d56e4ca08b3c8d8adcaadb84"},
+ {file = "coverage-7.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:536f77f2bf5797983652d1d55f1a7272a29afcc89e3ae51caa99b2db4e89d658"},
+ {file = "coverage-7.6.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f361296ca7054f0936b02525646b2731b32c8074ba6defab524b79b2b7eeac72"},
+ {file = "coverage-7.6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7926d8d034e06b479797c199747dd774d5e86179f2ce44294423327a88d66ca7"},
+ {file = "coverage-7.6.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0bbae11c138585c89fb4e991faefb174a80112e1a7557d507aaa07675c62e66b"},
+ {file = "coverage-7.6.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fcad7d5d2bbfeae1026b395036a8aa5abf67e8038ae7e6a25c7d0f88b10a8e6a"},
+ {file = "coverage-7.6.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f01e53575f27097d75d42de33b1b289c74b16891ce576d767ad8c48d17aeb5e0"},
+ {file = "coverage-7.6.2-cp311-cp311-win32.whl", hash = "sha256:7781f4f70c9b0b39e1b129b10c7d43a4e0c91f90c60435e6da8288efc2b73438"},
+ {file = "coverage-7.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:9bcd51eeca35a80e76dc5794a9dd7cb04b97f0e8af620d54711793bfc1fbba4b"},
+ {file = "coverage-7.6.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ebc94fadbd4a3f4215993326a6a00e47d79889391f5659bf310f55fe5d9f581c"},
+ {file = "coverage-7.6.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9681516288e3dcf0aa7c26231178cc0be6cac9705cac06709f2353c5b406cfea"},
+ {file = "coverage-7.6.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d9c5d13927d77af4fbe453953810db766f75401e764727e73a6ee4f82527b3e"},
+ {file = "coverage-7.6.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b92f9ca04b3e719d69b02dc4a69debb795af84cb7afd09c5eb5d54b4a1ae2191"},
+ {file = "coverage-7.6.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ff2ef83d6d0b527b5c9dad73819b24a2f76fdddcfd6c4e7a4d7e73ecb0656b4"},
+ {file = "coverage-7.6.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:47ccb6e99a3031ffbbd6e7cc041e70770b4fe405370c66a54dbf26a500ded80b"},
+ {file = "coverage-7.6.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a867d26f06bcd047ef716175b2696b315cb7571ccb951006d61ca80bbc356e9e"},
+ {file = "coverage-7.6.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cdfcf2e914e2ba653101157458afd0ad92a16731eeba9a611b5cbb3e7124e74b"},
+ {file = "coverage-7.6.2-cp312-cp312-win32.whl", hash = "sha256:f9035695dadfb397bee9eeaf1dc7fbeda483bf7664a7397a629846800ce6e276"},
+ {file = "coverage-7.6.2-cp312-cp312-win_amd64.whl", hash = "sha256:5ed69befa9a9fc796fe015a7040c9398722d6b97df73a6b608e9e275fa0932b0"},
+ {file = "coverage-7.6.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4eea60c79d36a8f39475b1af887663bc3ae4f31289cd216f514ce18d5938df40"},
+ {file = "coverage-7.6.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa68a6cdbe1bc6793a9dbfc38302c11599bbe1837392ae9b1d238b9ef3dafcf1"},
+ {file = "coverage-7.6.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ec528ae69f0a139690fad6deac8a7d33629fa61ccce693fdd07ddf7e9931fba"},
+ {file = "coverage-7.6.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed5ac02126f74d190fa2cc14a9eb2a5d9837d5863920fa472b02eb1595cdc925"},
+ {file = "coverage-7.6.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21c0ea0d4db8a36b275cb6fb2437a3715697a4ba3cb7b918d3525cc75f726304"},
+ {file = "coverage-7.6.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:35a51598f29b2a19e26d0908bd196f771a9b1c5d9a07bf20be0adf28f1ad4f77"},
+ {file = "coverage-7.6.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c9192925acc33e146864b8cf037e2ed32a91fdf7644ae875f5d46cd2ef086a5f"},
+ {file = "coverage-7.6.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bf4eeecc9e10f5403ec06138978235af79c9a79af494eb6b1d60a50b49ed2869"},
+ {file = "coverage-7.6.2-cp313-cp313-win32.whl", hash = "sha256:e4ee15b267d2dad3e8759ca441ad450c334f3733304c55210c2a44516e8d5530"},
+ {file = "coverage-7.6.2-cp313-cp313-win_amd64.whl", hash = "sha256:c71965d1ced48bf97aab79fad56df82c566b4c498ffc09c2094605727c4b7e36"},
+ {file = "coverage-7.6.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:7571e8bbecc6ac066256f9de40365ff833553e2e0c0c004f4482facb131820ef"},
+ {file = "coverage-7.6.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:078a87519057dacb5d77e333f740708ec2a8f768655f1db07f8dfd28d7a005f0"},
+ {file = "coverage-7.6.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e5e92e3e84a8718d2de36cd8387459cba9a4508337b8c5f450ce42b87a9e760"},
+ {file = "coverage-7.6.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ebabdf1c76593a09ee18c1a06cd3022919861365219ea3aca0247ededf6facd6"},
+ {file = "coverage-7.6.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12179eb0575b8900912711688e45474f04ab3934aaa7b624dea7b3c511ecc90f"},
+ {file = "coverage-7.6.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:39d3b964abfe1519b9d313ab28abf1d02faea26cd14b27f5283849bf59479ff5"},
+ {file = "coverage-7.6.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:84c4315577f7cd511d6250ffd0f695c825efe729f4205c0340f7004eda51191f"},
+ {file = "coverage-7.6.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ff797320dcbff57caa6b2301c3913784a010e13b1f6cf4ab3f563f3c5e7919db"},
+ {file = "coverage-7.6.2-cp313-cp313t-win32.whl", hash = "sha256:2b636a301e53964550e2f3094484fa5a96e699db318d65398cfba438c5c92171"},
+ {file = "coverage-7.6.2-cp313-cp313t-win_amd64.whl", hash = "sha256:d03a060ac1a08e10589c27d509bbdb35b65f2d7f3f8d81cf2fa199877c7bc58a"},
+ {file = "coverage-7.6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c37faddc8acd826cfc5e2392531aba734b229741d3daec7f4c777a8f0d4993e5"},
+ {file = "coverage-7.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab31fdd643f162c467cfe6a86e9cb5f1965b632e5e65c072d90854ff486d02cf"},
+ {file = "coverage-7.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97df87e1a20deb75ac7d920c812e9326096aa00a9a4b6d07679b4f1f14b06c90"},
+ {file = "coverage-7.6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:343056c5e0737487a5291f5691f4dfeb25b3e3c8699b4d36b92bb0e586219d14"},
+ {file = "coverage-7.6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4ef1c56b47b6b9024b939d503ab487231df1f722065a48f4fc61832130b90e"},
+ {file = "coverage-7.6.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fca4a92c8a7a73dee6946471bce6d1443d94155694b893b79e19ca2a540d86e"},
+ {file = "coverage-7.6.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:69f251804e052fc46d29d0e7348cdc5fcbfc4861dc4a1ebedef7e78d241ad39e"},
+ {file = "coverage-7.6.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e8ea055b3ea046c0f66217af65bc193bbbeca1c8661dc5fd42698db5795d2627"},
+ {file = "coverage-7.6.2-cp39-cp39-win32.whl", hash = "sha256:6c2ba1e0c24d8fae8f2cf0aeb2fc0a2a7f69b6d20bd8d3749fd6b36ecef5edf0"},
+ {file = "coverage-7.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:2186369a654a15628e9c1c9921409a6b3eda833e4b91f3ca2a7d9f77abb4987c"},
+ {file = "coverage-7.6.2-pp39.pp310-none-any.whl", hash = "sha256:667952739daafe9616db19fbedbdb87917eee253ac4f31d70c7587f7ab531b4e"},
+ {file = "coverage-7.6.2.tar.gz", hash = "sha256:a5f81e68aa62bc0cfca04f7b19eaa8f9c826b53fc82ab9e2121976dc74f131f3"},
]
[package.dependencies]
@@ -520,108 +918,206 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1
[package.extras]
toml = ["tomli"]
+[[package]]
+name = "cycler"
+version = "0.12.1"
+description = "Composable style cycles"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"},
+ {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"},
+]
+
+[package.extras]
+docs = ["ipython", "matplotlib", "numpydoc", "sphinx"]
+tests = ["pytest", "pytest-cov", "pytest-xdist"]
+
+[[package]]
+name = "dash"
+version = "2.9.3"
+description = "A Python framework for building reactive web-apps. Developed by Plotly."
+optional = true
+python-versions = ">=3.6"
+files = [
+ {file = "dash-2.9.3-py3-none-any.whl", hash = "sha256:a749ae1ea9de3fe7b785353a818ec9b629d39c6b7e02462954203bd1e296fd0e"},
+ {file = "dash-2.9.3.tar.gz", hash = "sha256:47392f8d6455dc989a697407eb5941f3bad80604df985ab1ac9d4244568ffb34"},
+]
+
+[package.dependencies]
+dash-core-components = "2.0.0"
+dash-html-components = "2.0.0"
+dash-table = "5.0.0"
+Flask = ">=1.0.4"
+plotly = ">=5.0.0"
+
+[package.extras]
+celery = ["celery[redis] (>=5.1.2)", "importlib-metadata (<5)", "redis (>=3.5.3)"]
+ci = ["black (==21.6b0)", "black (==22.3.0)", "dash-dangerously-set-inner-html", "dash-flow-example (==0.0.5)", "flake8 (==3.9.2)", "flaky (==3.7.0)", "flask-talisman (==1.0.0)", "isort (==4.3.21)", "mimesis", "mock (==4.0.3)", "numpy", "openpyxl", "orjson (==3.5.4)", "orjson (==3.6.7)", "pandas (==1.1.5)", "pandas (>=1.4.0)", "preconditions", "pyarrow", "pyarrow (<3)", "pylint (==2.13.5)", "pytest-mock", "pytest-rerunfailures", "pytest-sugar (==0.9.6)", "xlrd (<2)", "xlrd (>=2.0.1)"]
+compress = ["flask-compress"]
+dev = ["PyYAML (>=5.4.1)", "coloredlogs (>=15.0.1)", "fire (>=0.4.0)"]
+diskcache = ["diskcache (>=5.2.1)", "multiprocess (>=0.70.12)", "psutil (>=5.8.0)"]
+testing = ["beautifulsoup4 (>=4.8.2)", "cryptography (<3.4)", "dash-testing-stub (>=0.0.2)", "lxml (>=4.6.2)", "multiprocess (>=0.70.12)", "percy (>=2.0.2)", "psutil (>=5.8.0)", "pytest (>=6.0.2)", "requests[security] (>=2.21.0)", "selenium (>=3.141.0,<=4.2.0)", "waitress (>=1.4.4)"]
+
+[[package]]
+name = "dash-core-components"
+version = "2.0.0"
+description = "Core component suite for Dash"
+optional = true
+python-versions = "*"
+files = [
+ {file = "dash_core_components-2.0.0-py3-none-any.whl", hash = "sha256:52b8e8cce13b18d0802ee3acbc5e888cb1248a04968f962d63d070400af2e346"},
+ {file = "dash_core_components-2.0.0.tar.gz", hash = "sha256:c6733874af975e552f95a1398a16c2ee7df14ce43fa60bb3718a3c6e0b63ffee"},
+]
+
+[[package]]
+name = "dash-html-components"
+version = "2.0.0"
+description = "Vanilla HTML components for Dash"
+optional = true
+python-versions = "*"
+files = [
+ {file = "dash_html_components-2.0.0-py3-none-any.whl", hash = "sha256:b42cc903713c9706af03b3f2548bda4be7307a7cf89b7d6eae3da872717d1b63"},
+ {file = "dash_html_components-2.0.0.tar.gz", hash = "sha256:8703a601080f02619a6390998e0b3da4a5daabe97a1fd7a9cebc09d015f26e50"},
+]
+
+[[package]]
+name = "dash-table"
+version = "5.0.0"
+description = "Dash table"
+optional = true
+python-versions = "*"
+files = [
+ {file = "dash_table-5.0.0-py3-none-any.whl", hash = "sha256:19036fa352bb1c11baf38068ec62d172f0515f73ca3276c79dee49b95ddc16c9"},
+ {file = "dash_table-5.0.0.tar.gz", hash = "sha256:18624d693d4c8ef2ddec99a6f167593437a7ea0bf153aa20f318c170c5bc7308"},
+]
+
[[package]]
name = "datasets"
-version = "2.20.0"
+version = "3.0.1"
description = "HuggingFace community-driven open-source library of datasets"
optional = false
python-versions = ">=3.8.0"
files = [
- {file = "datasets-2.20.0-py3-none-any.whl", hash = "sha256:76ac02e3bdfff824492e20678f0b6b1b6d080515957fe834b00c2ba8d6b18e5e"},
- {file = "datasets-2.20.0.tar.gz", hash = "sha256:3c4dbcd27e0f642b9d41d20ff2efa721a5e04b32b2ca4009e0fc9139e324553f"},
+ {file = "datasets-3.0.1-py3-none-any.whl", hash = "sha256:db080aab41c8cc68645117a0f172e5c6789cbc672f066de0aa5a08fc3eebc686"},
+ {file = "datasets-3.0.1.tar.gz", hash = "sha256:40d63b09e76a3066c32e746d6fdc36fd3f29ed2acd49bf5b1a2100da32936511"},
]
[package.dependencies]
aiohttp = "*"
dill = ">=0.3.0,<0.3.9"
filelock = "*"
-fsspec = {version = ">=2023.1.0,<=2024.5.0", extras = ["http"]}
-huggingface-hub = ">=0.21.2"
+fsspec = {version = ">=2023.1.0,<=2024.6.1", extras = ["http"]}
+huggingface-hub = ">=0.22.0"
multiprocess = "*"
numpy = ">=1.17"
packaging = "*"
pandas = "*"
pyarrow = ">=15.0.0"
-pyarrow-hotfix = "*"
pyyaml = ">=5.1"
requests = ">=2.32.2"
tqdm = ">=4.66.3"
xxhash = "*"
[package.extras]
-apache-beam = ["apache-beam (>=2.26.0)"]
-audio = ["librosa", "soundfile (>=0.12.1)"]
+audio = ["librosa", "soundfile (>=0.12.1)", "soxr (>=0.4.0)"]
benchmarks = ["tensorflow (==2.12.0)", "torch (==2.0.1)", "transformers (==4.30.1)"]
-dev = ["Pillow (>=9.4.0)", "absl-py", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "ruff (>=0.3.0)", "s3fs", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy", "tensorflow (>=2.6.0)", "tiktoken", "torch", "torch (>=2.0.0)", "transformers", "typing-extensions (>=4.6.1)", "zstandard"]
+dev = ["Pillow (>=9.4.0)", "absl-py", "decorator", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.8.0.post1)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "moto[server]", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "ruff (>=0.3.0)", "s3fs", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "soxr (>=0.4.0)", "sqlalchemy", "tensorflow (>=2.16.0)", "tensorflow (>=2.6.0)", "tensorflow (>=2.6.0)", "tiktoken", "torch", "torch (>=2.0.0)", "torchdata", "transformers", "transformers (>=4.42.0)", "zstandard"]
docs = ["s3fs", "tensorflow (>=2.6.0)", "torch", "transformers"]
jax = ["jax (>=0.3.14)", "jaxlib (>=0.3.14)"]
-metrics-tests = ["Werkzeug (>=1.0.1)", "accelerate", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "requests-file (>=1.5.1)", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "spacy (>=3.0.0)", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "typer (<0.5.0)"]
quality = ["ruff (>=0.3.0)"]
s3 = ["s3fs"]
tensorflow = ["tensorflow (>=2.6.0)"]
tensorflow-gpu = ["tensorflow (>=2.6.0)"]
-tests = ["Pillow (>=9.4.0)", "absl-py", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "sqlalchemy", "tensorflow (>=2.6.0)", "tiktoken", "torch (>=2.0.0)", "transformers", "typing-extensions (>=4.6.1)", "zstandard"]
+tests = ["Pillow (>=9.4.0)", "absl-py", "decorator", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.8.0.post1)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "librosa", "lz4", "moto[server]", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "soxr (>=0.4.0)", "sqlalchemy", "tensorflow (>=2.16.0)", "tensorflow (>=2.6.0)", "tiktoken", "torch (>=2.0.0)", "torchdata", "transformers (>=4.42.0)", "zstandard"]
+tests-numpy2 = ["Pillow (>=9.4.0)", "absl-py", "decorator", "elasticsearch (<8.0.0)", "jax (>=0.3.14)", "jaxlib (>=0.3.14)", "joblib (<1.3.0)", "joblibspark", "lz4", "moto[server]", "polars[timezone] (>=0.20.0)", "protobuf (<4.0.0)", "py7zr", "pyspark (>=3.4)", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile (>=0.12.1)", "soxr (>=0.4.0)", "sqlalchemy", "tiktoken", "torch (>=2.0.0)", "torchdata", "transformers (>=4.42.0)", "zstandard"]
torch = ["torch"]
vision = ["Pillow (>=9.4.0)"]
[[package]]
name = "debugpy"
-version = "1.8.2"
+version = "1.8.7"
description = "An implementation of the Debug Adapter Protocol for Python"
optional = true
python-versions = ">=3.8"
files = [
- {file = "debugpy-1.8.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:7ee2e1afbf44b138c005e4380097d92532e1001580853a7cb40ed84e0ef1c3d2"},
- {file = "debugpy-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f8c3f7c53130a070f0fc845a0f2cee8ed88d220d6b04595897b66605df1edd6"},
- {file = "debugpy-1.8.2-cp310-cp310-win32.whl", hash = "sha256:f179af1e1bd4c88b0b9f0fa153569b24f6b6f3de33f94703336363ae62f4bf47"},
- {file = "debugpy-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:0600faef1d0b8d0e85c816b8bb0cb90ed94fc611f308d5fde28cb8b3d2ff0fe3"},
- {file = "debugpy-1.8.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:8a13417ccd5978a642e91fb79b871baded925d4fadd4dfafec1928196292aa0a"},
- {file = "debugpy-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acdf39855f65c48ac9667b2801234fc64d46778021efac2de7e50907ab90c634"},
- {file = "debugpy-1.8.2-cp311-cp311-win32.whl", hash = "sha256:2cbd4d9a2fc5e7f583ff9bf11f3b7d78dfda8401e8bb6856ad1ed190be4281ad"},
- {file = "debugpy-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:d3408fddd76414034c02880e891ea434e9a9cf3a69842098ef92f6e809d09afa"},
- {file = "debugpy-1.8.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:5d3ccd39e4021f2eb86b8d748a96c766058b39443c1f18b2dc52c10ac2757835"},
- {file = "debugpy-1.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62658aefe289598680193ff655ff3940e2a601765259b123dc7f89c0239b8cd3"},
- {file = "debugpy-1.8.2-cp312-cp312-win32.whl", hash = "sha256:bd11fe35d6fd3431f1546d94121322c0ac572e1bfb1f6be0e9b8655fb4ea941e"},
- {file = "debugpy-1.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:15bc2f4b0f5e99bf86c162c91a74c0631dbd9cef3c6a1d1329c946586255e859"},
- {file = "debugpy-1.8.2-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:5a019d4574afedc6ead1daa22736c530712465c0c4cd44f820d803d937531b2d"},
- {file = "debugpy-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40f062d6877d2e45b112c0bbade9a17aac507445fd638922b1a5434df34aed02"},
- {file = "debugpy-1.8.2-cp38-cp38-win32.whl", hash = "sha256:c78ba1680f1015c0ca7115671fe347b28b446081dada3fedf54138f44e4ba031"},
- {file = "debugpy-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:cf327316ae0c0e7dd81eb92d24ba8b5e88bb4d1b585b5c0d32929274a66a5210"},
- {file = "debugpy-1.8.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:1523bc551e28e15147815d1397afc150ac99dbd3a8e64641d53425dba57b0ff9"},
- {file = "debugpy-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e24ccb0cd6f8bfaec68d577cb49e9c680621c336f347479b3fce060ba7c09ec1"},
- {file = "debugpy-1.8.2-cp39-cp39-win32.whl", hash = "sha256:7f8d57a98c5a486c5c7824bc0b9f2f11189d08d73635c326abef268f83950326"},
- {file = "debugpy-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:16c8dcab02617b75697a0a925a62943e26a0330da076e2a10437edd9f0bf3755"},
- {file = "debugpy-1.8.2-py2.py3-none-any.whl", hash = "sha256:16e16df3a98a35c63c3ab1e4d19be4cbc7fdda92d9ddc059294f18910928e0ca"},
- {file = "debugpy-1.8.2.zip", hash = "sha256:95378ed08ed2089221896b9b3a8d021e642c24edc8fef20e5d4342ca8be65c00"},
+ {file = "debugpy-1.8.7-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:95fe04a573b8b22896c404365e03f4eda0ce0ba135b7667a1e57bd079793b96b"},
+ {file = "debugpy-1.8.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:628a11f4b295ffb4141d8242a9bb52b77ad4a63a2ad19217a93be0f77f2c28c9"},
+ {file = "debugpy-1.8.7-cp310-cp310-win32.whl", hash = "sha256:85ce9c1d0eebf622f86cc68618ad64bf66c4fc3197d88f74bb695a416837dd55"},
+ {file = "debugpy-1.8.7-cp310-cp310-win_amd64.whl", hash = "sha256:29e1571c276d643757ea126d014abda081eb5ea4c851628b33de0c2b6245b037"},
+ {file = "debugpy-1.8.7-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:caf528ff9e7308b74a1749c183d6808ffbedbb9fb6af78b033c28974d9b8831f"},
+ {file = "debugpy-1.8.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cba1d078cf2e1e0b8402e6bda528bf8fda7ccd158c3dba6c012b7897747c41a0"},
+ {file = "debugpy-1.8.7-cp311-cp311-win32.whl", hash = "sha256:171899588bcd412151e593bd40d9907133a7622cd6ecdbdb75f89d1551df13c2"},
+ {file = "debugpy-1.8.7-cp311-cp311-win_amd64.whl", hash = "sha256:6e1c4ffb0c79f66e89dfd97944f335880f0d50ad29525dc792785384923e2211"},
+ {file = "debugpy-1.8.7-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:4d27d842311353ede0ad572600c62e4bcd74f458ee01ab0dd3a1a4457e7e3706"},
+ {file = "debugpy-1.8.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c1fd62ae0356e194f3e7b7a92acd931f71fe81c4b3be2c17a7b8a4b546ec2"},
+ {file = "debugpy-1.8.7-cp312-cp312-win32.whl", hash = "sha256:2f729228430ef191c1e4df72a75ac94e9bf77413ce5f3f900018712c9da0aaca"},
+ {file = "debugpy-1.8.7-cp312-cp312-win_amd64.whl", hash = "sha256:45c30aaefb3e1975e8a0258f5bbd26cd40cde9bfe71e9e5a7ac82e79bad64e39"},
+ {file = "debugpy-1.8.7-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:d050a1ec7e925f514f0f6594a1e522580317da31fbda1af71d1530d6ea1f2b40"},
+ {file = "debugpy-1.8.7-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f4349a28e3228a42958f8ddaa6333d6f8282d5edaea456070e48609c5983b7"},
+ {file = "debugpy-1.8.7-cp313-cp313-win32.whl", hash = "sha256:11ad72eb9ddb436afb8337891a986302e14944f0f755fd94e90d0d71e9100bba"},
+ {file = "debugpy-1.8.7-cp313-cp313-win_amd64.whl", hash = "sha256:2efb84d6789352d7950b03d7f866e6d180284bc02c7e12cb37b489b7083d81aa"},
+ {file = "debugpy-1.8.7-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:4b908291a1d051ef3331484de8e959ef3e66f12b5e610c203b5b75d2725613a7"},
+ {file = "debugpy-1.8.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da8df5b89a41f1fd31503b179d0a84a5fdb752dddd5b5388dbd1ae23cda31ce9"},
+ {file = "debugpy-1.8.7-cp38-cp38-win32.whl", hash = "sha256:b12515e04720e9e5c2216cc7086d0edadf25d7ab7e3564ec8b4521cf111b4f8c"},
+ {file = "debugpy-1.8.7-cp38-cp38-win_amd64.whl", hash = "sha256:93176e7672551cb5281577cdb62c63aadc87ec036f0c6a486f0ded337c504596"},
+ {file = "debugpy-1.8.7-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:90d93e4f2db442f8222dec5ec55ccfc8005821028982f1968ebf551d32b28907"},
+ {file = "debugpy-1.8.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6db2a370e2700557a976eaadb16243ec9c91bd46f1b3bb15376d7aaa7632c81"},
+ {file = "debugpy-1.8.7-cp39-cp39-win32.whl", hash = "sha256:a6cf2510740e0c0b4a40330640e4b454f928c7b99b0c9dbf48b11efba08a8cda"},
+ {file = "debugpy-1.8.7-cp39-cp39-win_amd64.whl", hash = "sha256:6a9d9d6d31846d8e34f52987ee0f1a904c7baa4912bf4843ab39dadf9b8f3e0d"},
+ {file = "debugpy-1.8.7-py2.py3-none-any.whl", hash = "sha256:57b00de1c8d2c84a61b90880f7e5b6deaf4c312ecbde3a0e8912f2a56c4ac9ae"},
+ {file = "debugpy-1.8.7.zip", hash = "sha256:18b8f731ed3e2e1df8e9cdaa23fb1fc9c24e570cd0081625308ec51c82efe42e"},
+]
+
+[[package]]
+name = "decorator"
+version = "5.1.1"
+description = "Decorators for Humans"
+optional = true
+python-versions = ">=3.5"
+files = [
+ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
+ {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
]
[[package]]
name = "deepdiff"
-version = "7.0.1"
+version = "8.0.1"
description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other."
optional = false
python-versions = ">=3.8"
files = [
- {file = "deepdiff-7.0.1-py3-none-any.whl", hash = "sha256:447760081918216aa4fd4ca78a4b6a848b81307b2ea94c810255334b759e1dc3"},
- {file = "deepdiff-7.0.1.tar.gz", hash = "sha256:260c16f052d4badbf60351b4f77e8390bee03a0b516246f6839bc813fb429ddf"},
+ {file = "deepdiff-8.0.1-py3-none-any.whl", hash = "sha256:42e99004ce603f9a53934c634a57b04ad5900e0d8ed0abb15e635767489cbc05"},
+ {file = "deepdiff-8.0.1.tar.gz", hash = "sha256:245599a4586ab59bb599ca3517a9c42f3318ff600ded5e80a3432693c8ec3c4b"},
]
[package.dependencies]
-ordered-set = ">=4.1.0,<4.2.0"
+orderly-set = "5.2.2"
[package.extras]
cli = ["click (==8.1.7)", "pyyaml (==6.0.1)"]
optimize = ["orjson"]
+[[package]]
+name = "defusedxml"
+version = "0.7.1"
+description = "XML bomb protection for Python stdlib modules"
+optional = true
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"},
+ {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"},
+]
+
[[package]]
name = "diffusers"
-version = "0.29.2"
+version = "0.30.3"
description = "State-of-the-art diffusion in PyTorch and JAX."
optional = false
python-versions = ">=3.8.0"
files = [
- {file = "diffusers-0.29.2-py3-none-any.whl", hash = "sha256:d5e9bb13c8097b4eed10df23d1294d2e5a418f53e3f89c7ef228b5b982970428"},
- {file = "diffusers-0.29.2.tar.gz", hash = "sha256:b85f277668e22089cf68b40dd9b76940db7d24ba9cdac107533ed10ab8e4e9db"},
+ {file = "diffusers-0.30.3-py3-none-any.whl", hash = "sha256:1b70209e4d2c61223b96a7e13bc4d70869c8b0b68f54a35ce3a67fcf813edeee"},
+ {file = "diffusers-0.30.3.tar.gz", hash = "sha256:67c5eb25d5b50bf0742624ef43fe0f6d1e1604f64aad3e8558469cbe89ecf72f"},
]
[package.dependencies]
@@ -635,13 +1131,13 @@ requests = "*"
safetensors = ">=0.3.1"
[package.extras]
-dev = ["GitPython (<3.1.19)", "Jinja2", "accelerate (>=0.29.3)", "compel (==0.1.8)", "datasets", "flax (>=0.4.1)", "hf-doc-builder (>=0.3.0)", "invisible-watermark (>=0.2.0)", "isort (>=5.5.4)", "jax (>=0.4.1)", "jaxlib (>=0.4.1)", "k-diffusion (>=0.0.12)", "librosa", "parameterized", "peft (>=0.6.0)", "protobuf (>=3.20.3,<4)", "pytest", "pytest-timeout", "pytest-xdist", "requests-mock (==1.10.0)", "ruff (==0.1.5)", "safetensors (>=0.3.1)", "scipy", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "torch (>=1.4)", "torchvision", "transformers (>=4.25.1)", "urllib3 (<=2.0.0)"]
+dev = ["GitPython (<3.1.19)", "Jinja2", "accelerate (>=0.31.0)", "compel (==0.1.8)", "datasets", "flax (>=0.4.1)", "hf-doc-builder (>=0.3.0)", "invisible-watermark (>=0.2.0)", "isort (>=5.5.4)", "jax (>=0.4.1)", "jaxlib (>=0.4.1)", "k-diffusion (>=0.0.12)", "librosa", "parameterized", "peft (>=0.6.0)", "protobuf (>=3.20.3,<4)", "pytest", "pytest-timeout", "pytest-xdist", "requests-mock (==1.10.0)", "ruff (==0.1.5)", "safetensors (>=0.3.1)", "scipy", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorboard", "torch (>=1.4)", "torchvision", "transformers (>=4.41.2)", "urllib3 (<=2.0.0)"]
docs = ["hf-doc-builder (>=0.3.0)"]
flax = ["flax (>=0.4.1)", "jax (>=0.4.1)", "jaxlib (>=0.4.1)"]
quality = ["hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "ruff (==0.1.5)", "urllib3 (<=2.0.0)"]
-test = ["GitPython (<3.1.19)", "Jinja2", "compel (==0.1.8)", "datasets", "invisible-watermark (>=0.2.0)", "k-diffusion (>=0.0.12)", "librosa", "parameterized", "pytest", "pytest-timeout", "pytest-xdist", "requests-mock (==1.10.0)", "safetensors (>=0.3.1)", "scipy", "sentencepiece (>=0.1.91,!=0.1.92)", "torchvision", "transformers (>=4.25.1)"]
-torch = ["accelerate (>=0.29.3)", "torch (>=1.4)"]
-training = ["Jinja2", "accelerate (>=0.29.3)", "datasets", "peft (>=0.6.0)", "protobuf (>=3.20.3,<4)", "tensorboard"]
+test = ["GitPython (<3.1.19)", "Jinja2", "compel (==0.1.8)", "datasets", "invisible-watermark (>=0.2.0)", "k-diffusion (>=0.0.12)", "librosa", "parameterized", "pytest", "pytest-timeout", "pytest-xdist", "requests-mock (==1.10.0)", "safetensors (>=0.3.1)", "scipy", "sentencepiece (>=0.1.91,!=0.1.92)", "torchvision", "transformers (>=4.41.2)"]
+torch = ["accelerate (>=0.31.0)", "torch (>=1.4)"]
+training = ["Jinja2", "accelerate (>=0.31.0)", "datasets", "peft (>=0.6.0)", "protobuf (>=3.20.3,<4)", "tensorboard"]
[[package]]
name = "dill"
@@ -660,13 +1156,13 @@ profile = ["gprof2dot (>=2022.7.29)"]
[[package]]
name = "distlib"
-version = "0.3.8"
+version = "0.3.9"
description = "Distribution utilities"
optional = true
python-versions = "*"
files = [
- {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"},
- {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"},
+ {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"},
+ {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"},
]
[[package]]
@@ -787,26 +1283,40 @@ six = ">=1.4.0"
[[package]]
name = "dora-rs"
-version = "0.3.5"
+version = "0.3.6"
description = "`dora` goal is to be a low latency, composable, and distributed data flow."
optional = true
python-versions = "*"
files = [
- {file = "dora_rs-0.3.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:01f811d0c6722f74743c153a7be0144686daeafa968c473e60f6b6c5dc8f5bff"},
- {file = "dora_rs-0.3.5-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:a36e97d31eeb66e6d5913130695d188ceee1248029961012a8b4f59fd3f58670"},
- {file = "dora_rs-0.3.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25d620123a733661dc740ef2b456601ddbaa69ae2b50d8141daa3c684bda385c"},
- {file = "dora_rs-0.3.5-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a9fdc4e73578bebb1c8d0f8bea2243a5a9e179f08c74d98576123b59b75e5cac"},
- {file = "dora_rs-0.3.5-cp37-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e65830634c58158557f0ab90e5d1f492bcbc6b74587b05825ba4c20b634dc1bd"},
- {file = "dora_rs-0.3.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c01f9ab8f93295341aeab2d606d484d9cff9d05f57581e2180433ec8e0d38307"},
- {file = "dora_rs-0.3.5-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5d6d46a49a34cd7e4f74496a1089b9a1b78282c219a28d98fe031a763e92d530"},
- {file = "dora_rs-0.3.5-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:bb888db22f63a7cc6ed6a287827d03a94e80f3668297b9c80169d393b99b5e6d"},
- {file = "dora_rs-0.3.5-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:c51284263fc72c936bd735b0a9c46303c5bda8c2000cb1cb443c8cf54c1f7ff3"},
- {file = "dora_rs-0.3.5-cp37-abi3-win_amd64.whl", hash = "sha256:88b4fe5e5569562fcdb3817abb89532f4abca913e8bd02e4ec228833716cbd09"},
+ {file = "dora_rs-0.3.6-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:c036d2d0792d8d6e0e9db1936ab5fd4c6d19e097f3fc259058733e526f94253a"},
+ {file = "dora_rs-0.3.6-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:95036f6fcb5aeb7bba8a1f37d84c627eefe09af1db17e36bc19209e950652446"},
+ {file = "dora_rs-0.3.6-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b5ef774dbafbdf8bda56939c6475916b7ec8f4b0c57c5b80f1d46eb642f5d07"},
+ {file = "dora_rs-0.3.6-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:78656d3ae1282a142a5fed410ec3a6f725fdf8d9f9192ed673e336ea3b083e12"},
+ {file = "dora_rs-0.3.6-cp37-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:681e22c8ecb3b48d11cb9019f8a32d4ae1e353e20d4ce3a0f0eedd0ccbd95e5f"},
+ {file = "dora_rs-0.3.6-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4598572bab6f726ec41fabb43bf0f7e3cf8082ea0f6f8f4e57845a6c919f31b3"},
+ {file = "dora_rs-0.3.6-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:297350f05f5f87a0bf647a1e5b4446728e5f800788c6bb28b462bcd167f1de7f"},
+ {file = "dora_rs-0.3.6-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:b1870a8e30f0ac298d17fd546224348d13a648bcfa0cbc51dba7e5136c1af928"},
+ {file = "dora_rs-0.3.6-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:182a189212d41be0c960fd3299bf6731af2e771f8858cfb1be7ebcc17d60a254"},
+ {file = "dora_rs-0.3.6-cp37-abi3-win_amd64.whl", hash = "sha256:a8f9343073e3fbca6bff3f0a13e5d2feabbe841a985c49e4294f7c14eb747bb5"},
]
[package.dependencies]
pyarrow = "*"
+[[package]]
+name = "drawnow"
+version = "0.72.5"
+description = "MATLAB-like drawnow"
+optional = true
+python-versions = "*"
+files = [
+ {file = "drawnow-0.72.5-py3-none-any.whl", hash = "sha256:4ff83a8b15f61a781edaaa2a3e6b71e2c8fd948960f188b870def701afcfa0d5"},
+ {file = "drawnow-0.72.5.tar.gz", hash = "sha256:9d1855605b2ec6ebc4e8a95201a7a0068eb1e2a5d1695cb1b7c462d660f32593"},
+]
+
+[package.dependencies]
+matplotlib = ">=1.5"
+
[[package]]
name = "dynamixel-sdk"
version = "3.7.31"
@@ -855,6 +1365,20 @@ files = [
[package.extras]
test = ["pytest (>=6)"]
+[[package]]
+name = "executing"
+version = "2.1.0"
+description = "Get the currently executing AST node of a frame, and other information"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"},
+ {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"},
+]
+
+[package.extras]
+tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"]
+
[[package]]
name = "farama-notifications"
version = "0.0.4"
@@ -877,21 +1401,48 @@ files = [
{file = "fasteners-0.19.tar.gz", hash = "sha256:b4f37c3ac52d8a445af3a66bce57b33b5e90b97c696b7b984f530cf8f0ded09c"},
]
+[[package]]
+name = "fastjsonschema"
+version = "2.20.0"
+description = "Fastest Python implementation of JSON schema"
+optional = true
+python-versions = "*"
+files = [
+ {file = "fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a"},
+ {file = "fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23"},
+]
+
+[package.extras]
+devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"]
+
+[[package]]
+name = "feetech-servo-sdk"
+version = "1.0.0"
+description = "This is source code from official feetech repository"
+optional = true
+python-versions = "*"
+files = [
+ {file = "feetech-servo-sdk-1.0.0.tar.gz", hash = "sha256:d4d3832e4b1b22a8222133a414db9f868224c2fb639426a1b11d96ddfe84e69c"},
+]
+
+[package.dependencies]
+pyserial = "*"
+
[[package]]
name = "filelock"
-version = "3.15.4"
+version = "3.16.1"
description = "A platform independent file lock."
optional = false
python-versions = ">=3.8"
files = [
- {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"},
- {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"},
+ {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"},
+ {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"},
]
[package.extras]
-docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"]
-typing = ["typing-extensions (>=4.8)"]
+docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"]
+typing = ["typing-extensions (>=4.12.2)"]
[[package]]
name = "flask"
@@ -915,6 +1466,104 @@ Werkzeug = ">=3.0.0"
async = ["asgiref (>=3.2)"]
dotenv = ["python-dotenv"]
+[[package]]
+name = "fonttools"
+version = "4.54.1"
+description = "Tools to manipulate font files"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "fonttools-4.54.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ed7ee041ff7b34cc62f07545e55e1468808691dddfd315d51dd82a6b37ddef2"},
+ {file = "fonttools-4.54.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41bb0b250c8132b2fcac148e2e9198e62ff06f3cc472065dff839327945c5882"},
+ {file = "fonttools-4.54.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7965af9b67dd546e52afcf2e38641b5be956d68c425bef2158e95af11d229f10"},
+ {file = "fonttools-4.54.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:278913a168f90d53378c20c23b80f4e599dca62fbffae4cc620c8eed476b723e"},
+ {file = "fonttools-4.54.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0e88e3018ac809b9662615072dcd6b84dca4c2d991c6d66e1970a112503bba7e"},
+ {file = "fonttools-4.54.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4aa4817f0031206e637d1e685251ac61be64d1adef111060df84fdcbc6ab6c44"},
+ {file = "fonttools-4.54.1-cp310-cp310-win32.whl", hash = "sha256:7e3b7d44e18c085fd8c16dcc6f1ad6c61b71ff463636fcb13df7b1b818bd0c02"},
+ {file = "fonttools-4.54.1-cp310-cp310-win_amd64.whl", hash = "sha256:dd9cc95b8d6e27d01e1e1f1fae8559ef3c02c76317da650a19047f249acd519d"},
+ {file = "fonttools-4.54.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5419771b64248484299fa77689d4f3aeed643ea6630b2ea750eeab219588ba20"},
+ {file = "fonttools-4.54.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:301540e89cf4ce89d462eb23a89464fef50915255ece765d10eee8b2bf9d75b2"},
+ {file = "fonttools-4.54.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76ae5091547e74e7efecc3cbf8e75200bc92daaeb88e5433c5e3e95ea8ce5aa7"},
+ {file = "fonttools-4.54.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82834962b3d7c5ca98cb56001c33cf20eb110ecf442725dc5fdf36d16ed1ab07"},
+ {file = "fonttools-4.54.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d26732ae002cc3d2ecab04897bb02ae3f11f06dd7575d1df46acd2f7c012a8d8"},
+ {file = "fonttools-4.54.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:58974b4987b2a71ee08ade1e7f47f410c367cdfc5a94fabd599c88165f56213a"},
+ {file = "fonttools-4.54.1-cp311-cp311-win32.whl", hash = "sha256:ab774fa225238986218a463f3fe151e04d8c25d7de09df7f0f5fce27b1243dbc"},
+ {file = "fonttools-4.54.1-cp311-cp311-win_amd64.whl", hash = "sha256:07e005dc454eee1cc60105d6a29593459a06321c21897f769a281ff2d08939f6"},
+ {file = "fonttools-4.54.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:54471032f7cb5fca694b5f1a0aaeba4af6e10ae989df408e0216f7fd6cdc405d"},
+ {file = "fonttools-4.54.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8fa92cb248e573daab8d032919623cc309c005086d743afb014c836636166f08"},
+ {file = "fonttools-4.54.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a911591200114969befa7f2cb74ac148bce5a91df5645443371aba6d222e263"},
+ {file = "fonttools-4.54.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93d458c8a6a354dc8b48fc78d66d2a8a90b941f7fec30e94c7ad9982b1fa6bab"},
+ {file = "fonttools-4.54.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5eb2474a7c5be8a5331146758debb2669bf5635c021aee00fd7c353558fc659d"},
+ {file = "fonttools-4.54.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c9c563351ddc230725c4bdf7d9e1e92cbe6ae8553942bd1fb2b2ff0884e8b714"},
+ {file = "fonttools-4.54.1-cp312-cp312-win32.whl", hash = "sha256:fdb062893fd6d47b527d39346e0c5578b7957dcea6d6a3b6794569370013d9ac"},
+ {file = "fonttools-4.54.1-cp312-cp312-win_amd64.whl", hash = "sha256:e4564cf40cebcb53f3dc825e85910bf54835e8a8b6880d59e5159f0f325e637e"},
+ {file = "fonttools-4.54.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6e37561751b017cf5c40fce0d90fd9e8274716de327ec4ffb0df957160be3bff"},
+ {file = "fonttools-4.54.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:357cacb988a18aace66e5e55fe1247f2ee706e01debc4b1a20d77400354cddeb"},
+ {file = "fonttools-4.54.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e953cc0bddc2beaf3a3c3b5dd9ab7554677da72dfaf46951e193c9653e515a"},
+ {file = "fonttools-4.54.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:58d29b9a294573d8319f16f2f79e42428ba9b6480442fa1836e4eb89c4d9d61c"},
+ {file = "fonttools-4.54.1-cp313-cp313-win32.whl", hash = "sha256:9ef1b167e22709b46bf8168368b7b5d3efeaaa746c6d39661c1b4405b6352e58"},
+ {file = "fonttools-4.54.1-cp313-cp313-win_amd64.whl", hash = "sha256:262705b1663f18c04250bd1242b0515d3bbae177bee7752be67c979b7d47f43d"},
+ {file = "fonttools-4.54.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ed2f80ca07025551636c555dec2b755dd005e2ea8fbeb99fc5cdff319b70b23b"},
+ {file = "fonttools-4.54.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9dc080e5a1c3b2656caff2ac2633d009b3a9ff7b5e93d0452f40cd76d3da3b3c"},
+ {file = "fonttools-4.54.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d152d1be65652fc65e695e5619e0aa0982295a95a9b29b52b85775243c06556"},
+ {file = "fonttools-4.54.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8583e563df41fdecef31b793b4dd3af8a9caa03397be648945ad32717a92885b"},
+ {file = "fonttools-4.54.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:0d1d353ef198c422515a3e974a1e8d5b304cd54a4c2eebcae708e37cd9eeffb1"},
+ {file = "fonttools-4.54.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fda582236fee135d4daeca056c8c88ec5f6f6d88a004a79b84a02547c8f57386"},
+ {file = "fonttools-4.54.1-cp38-cp38-win32.whl", hash = "sha256:e7d82b9e56716ed32574ee106cabca80992e6bbdcf25a88d97d21f73a0aae664"},
+ {file = "fonttools-4.54.1-cp38-cp38-win_amd64.whl", hash = "sha256:ada215fd079e23e060157aab12eba0d66704316547f334eee9ff26f8c0d7b8ab"},
+ {file = "fonttools-4.54.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5b8a096e649768c2f4233f947cf9737f8dbf8728b90e2771e2497c6e3d21d13"},
+ {file = "fonttools-4.54.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4e10d2e0a12e18f4e2dd031e1bf7c3d7017be5c8dbe524d07706179f355c5dac"},
+ {file = "fonttools-4.54.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31c32d7d4b0958600eac75eaf524b7b7cb68d3a8c196635252b7a2c30d80e986"},
+ {file = "fonttools-4.54.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c39287f5c8f4a0c5a55daf9eaf9ccd223ea59eed3f6d467133cc727d7b943a55"},
+ {file = "fonttools-4.54.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a7a310c6e0471602fe3bf8efaf193d396ea561486aeaa7adc1f132e02d30c4b9"},
+ {file = "fonttools-4.54.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d3b659d1029946f4ff9b6183984578041b520ce0f8fb7078bb37ec7445806b33"},
+ {file = "fonttools-4.54.1-cp39-cp39-win32.whl", hash = "sha256:e96bc94c8cda58f577277d4a71f51c8e2129b8b36fd05adece6320dd3d57de8a"},
+ {file = "fonttools-4.54.1-cp39-cp39-win_amd64.whl", hash = "sha256:e8a4b261c1ef91e7188a30571be6ad98d1c6d9fa2427244c545e2fa0a2494dd7"},
+ {file = "fonttools-4.54.1-py3-none-any.whl", hash = "sha256:37cddd62d83dc4f72f7c3f3c2bcf2697e89a30efb152079896544a93907733bd"},
+ {file = "fonttools-4.54.1.tar.gz", hash = "sha256:957f669d4922f92c171ba01bef7f29410668db09f6c02111e22b2bce446f3285"},
+]
+
+[package.extras]
+all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"]
+graphite = ["lz4 (>=1.7.4.2)"]
+interpolatable = ["munkres", "pycairo", "scipy"]
+lxml = ["lxml (>=4.0)"]
+pathops = ["skia-pathops (>=0.5.0)"]
+plot = ["matplotlib"]
+repacker = ["uharfbuzz (>=0.23.0)"]
+symfont = ["sympy"]
+type1 = ["xattr"]
+ufo = ["fs (>=2.2.0,<3)"]
+unicode = ["unicodedata2 (>=15.1.0)"]
+woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"]
+
+[[package]]
+name = "fqdn"
+version = "1.5.1"
+description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers"
+optional = true
+python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4"
+files = [
+ {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"},
+ {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"},
+]
+
+[[package]]
+name = "freetype-py"
+version = "2.5.1"
+description = "Freetype python bindings"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "freetype-py-2.5.1.zip", hash = "sha256:cfe2686a174d0dd3d71a9d8ee9bf6a2c23f5872385cf8ce9f24af83d076e2fbd"},
+ {file = "freetype_py-2.5.1-py3-none-macosx_10_9_universal2.whl", hash = "sha256:d01ded2557694f06aa0413f3400c0c0b2b5ebcaabeef7aaf3d756be44f51e90b"},
+ {file = "freetype_py-2.5.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d2f6b3d68496797da23204b3b9c4e77e67559c80390fc0dc8b3f454ae1cd819"},
+ {file = "freetype_py-2.5.1-py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:289b443547e03a4f85302e3ac91376838e0d11636050166662a4f75e3087ed0b"},
+ {file = "freetype_py-2.5.1-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:cd3bfdbb7e1a84818cfbc8025fca3096f4f2afcd5d4641184bf0a3a2e6f97bbf"},
+ {file = "freetype_py-2.5.1-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:3c1aefc4f0d5b7425f014daccc5fdc7c6f914fb7d6a695cc684f1c09cd8c1660"},
+ {file = "freetype_py-2.5.1-py3-none-win_amd64.whl", hash = "sha256:0b7f8e0342779f65ca13ef8bc103938366fecade23e6bb37cb671c2b8ad7f124"},
+]
+
[[package]]
name = "frozenlist"
version = "1.4.1"
@@ -1003,13 +1652,13 @@ files = [
[[package]]
name = "fsspec"
-version = "2024.5.0"
+version = "2024.6.1"
description = "File-system specification"
optional = false
python-versions = ">=3.8"
files = [
- {file = "fsspec-2024.5.0-py3-none-any.whl", hash = "sha256:e0fdbc446d67e182f49a70b82cf7889028a63588fde6b222521f10937b2b670c"},
- {file = "fsspec-2024.5.0.tar.gz", hash = "sha256:1d021b0b0f933e3b3029ed808eb400c08ba101ca2de4b3483fbc9ca23fcee94a"},
+ {file = "fsspec-2024.6.1-py3-none-any.whl", hash = "sha256:3cb443f8bcd2efb31295a5b9fdb02aee81d8452c80d28f97a6d0959e6cee101e"},
+ {file = "fsspec-2024.6.1.tar.gz", hash = "sha256:fad7d7e209dd4c1208e3bbfda706620e0da5142bebbd9c384afb95b07e798e49"},
]
[package.dependencies]
@@ -1021,6 +1670,7 @@ adl = ["adlfs"]
arrow = ["pyarrow (>=1)"]
dask = ["dask", "distributed"]
dev = ["pre-commit", "ruff"]
+doc = ["numpydoc", "sphinx", "sphinx-design", "sphinx-rtd-theme", "yarl"]
dropbox = ["dropbox", "dropboxdrivefs", "requests"]
full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"]
fuse = ["fusepy"]
@@ -1042,6 +1692,17 @@ test-downstream = ["aiobotocore (>=2.5.4,<3.0.0)", "dask-expr", "dask[dataframe,
test-full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "cloudpickle", "dask", "distributed", "dropbox", "dropboxdrivefs", "fastparquet", "fusepy", "gcsfs", "jinja2", "kerchunk", "libarchive-c", "lz4", "notebook", "numpy", "ocifs", "pandas", "panel", "paramiko", "pyarrow", "pyarrow (>=1)", "pyftpdlib", "pygit2", "pytest", "pytest-asyncio (!=0.22.0)", "pytest-benchmark", "pytest-cov", "pytest-mock", "pytest-recording", "pytest-rerunfailures", "python-snappy", "requests", "smbprotocol", "tqdm", "urllib3", "zarr", "zstandard"]
tqdm = ["tqdm"]
+[[package]]
+name = "future"
+version = "1.0.0"
+description = "Clean single-source support for Python 3 and 2"
+optional = true
+python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
+files = [
+ {file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"},
+ {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"},
+]
+
[[package]]
name = "gdown"
version = "5.2.0"
@@ -1154,7 +1815,7 @@ pyarrow = ">=12.0.0"
type = "git"
url = "https://github.com/dora-rs/dora-lerobot.git"
reference = "HEAD"
-resolved_reference = "fda22deba84c46695369736edd34dc740aef45eb"
+resolved_reference = "7844fbdb97d467a4672be3eb102ebca96211e95b"
subdirectory = "gym_dora"
[[package]]
@@ -1254,119 +1915,316 @@ mujoco-py = ["cython (<3)", "mujoco-py (>=2.1,<2.2)"]
testing = ["Jinja2 (>=3.0.3)", "PettingZoo (>=1.23.0)", "cython (<3)", "mujoco-py (>=2.1,<2.2)", "pytest (==7.0.1)"]
[[package]]
-name = "h5py"
-version = "3.11.0"
-description = "Read and write HDF5 files from Python"
+name = "h11"
+version = "0.14.0"
+description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
+ {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
+]
+
+[[package]]
+name = "h5py"
+version = "3.12.1"
+description = "Read and write HDF5 files from Python"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
files = [
- {file = "h5py-3.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1625fd24ad6cfc9c1ccd44a66dac2396e7ee74940776792772819fc69f3a3731"},
- {file = "h5py-3.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c072655ad1d5fe9ef462445d3e77a8166cbfa5e599045f8aa3c19b75315f10e5"},
- {file = "h5py-3.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77b19a40788e3e362b54af4dcf9e6fde59ca016db2c61360aa30b47c7b7cef00"},
- {file = "h5py-3.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:ef4e2f338fc763f50a8113890f455e1a70acd42a4d083370ceb80c463d803972"},
- {file = "h5py-3.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bbd732a08187a9e2a6ecf9e8af713f1d68256ee0f7c8b652a32795670fb481ba"},
- {file = "h5py-3.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75bd7b3d93fbeee40860fd70cdc88df4464e06b70a5ad9ce1446f5f32eb84007"},
- {file = "h5py-3.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52c416f8eb0daae39dabe71415cb531f95dce2d81e1f61a74537a50c63b28ab3"},
- {file = "h5py-3.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:083e0329ae534a264940d6513f47f5ada617da536d8dccbafc3026aefc33c90e"},
- {file = "h5py-3.11.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a76cae64080210389a571c7d13c94a1a6cf8cb75153044fd1f822a962c97aeab"},
- {file = "h5py-3.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3736fe21da2b7d8a13fe8fe415f1272d2a1ccdeff4849c1421d2fb30fd533bc"},
- {file = "h5py-3.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa6ae84a14103e8dc19266ef4c3e5d7c00b68f21d07f2966f0ca7bdb6c2761fb"},
- {file = "h5py-3.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:21dbdc5343f53b2e25404673c4f00a3335aef25521bd5fa8c707ec3833934892"},
- {file = "h5py-3.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:754c0c2e373d13d6309f408325343b642eb0f40f1a6ad21779cfa9502209e150"},
- {file = "h5py-3.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:731839240c59ba219d4cb3bc5880d438248533366f102402cfa0621b71796b62"},
- {file = "h5py-3.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ec9df3dd2018904c4cc06331951e274f3f3fd091e6d6cc350aaa90fa9b42a76"},
- {file = "h5py-3.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:55106b04e2c83dfb73dc8732e9abad69d83a436b5b82b773481d95d17b9685e1"},
- {file = "h5py-3.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f4e025e852754ca833401777c25888acb96889ee2c27e7e629a19aee288833f0"},
- {file = "h5py-3.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c4b760082626120031d7902cd983d8c1f424cdba2809f1067511ef283629d4b"},
- {file = "h5py-3.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67462d0669f8f5459529de179f7771bd697389fcb3faab54d63bf788599a48ea"},
- {file = "h5py-3.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:d9c944d364688f827dc889cf83f1fca311caf4fa50b19f009d1f2b525edd33a3"},
- {file = "h5py-3.11.0.tar.gz", hash = "sha256:7b7e8f78072a2edec87c9836f25f34203fd492a4475709a18b417a33cfb21fa9"},
-]
-
-[package.dependencies]
-numpy = ">=1.17.3"
+ {file = "h5py-3.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f0f1a382cbf494679c07b4371f90c70391dedb027d517ac94fa2c05299dacda"},
+ {file = "h5py-3.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cb65f619dfbdd15e662423e8d257780f9a66677eae5b4b3fc9dca70b5fd2d2a3"},
+ {file = "h5py-3.12.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b15d8dbd912c97541312c0e07438864d27dbca857c5ad634de68110c6beb1c2"},
+ {file = "h5py-3.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59685fe40d8c1fbbee088c88cd4da415a2f8bee5c270337dc5a1c4aa634e3307"},
+ {file = "h5py-3.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:577d618d6b6dea3da07d13cc903ef9634cde5596b13e832476dd861aaf651f3e"},
+ {file = "h5py-3.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ccd9006d92232727d23f784795191bfd02294a4f2ba68708825cb1da39511a93"},
+ {file = "h5py-3.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad8a76557880aed5234cfe7279805f4ab5ce16b17954606cca90d578d3e713ef"},
+ {file = "h5py-3.12.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1473348139b885393125126258ae2d70753ef7e9cec8e7848434f385ae72069e"},
+ {file = "h5py-3.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:018a4597f35092ae3fb28ee851fdc756d2b88c96336b8480e124ce1ac6fb9166"},
+ {file = "h5py-3.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:3fdf95092d60e8130ba6ae0ef7a9bd4ade8edbe3569c13ebbaf39baefffc5ba4"},
+ {file = "h5py-3.12.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:06a903a4e4e9e3ebbc8b548959c3c2552ca2d70dac14fcfa650d9261c66939ed"},
+ {file = "h5py-3.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7b3b8f3b48717e46c6a790e3128d39c61ab595ae0a7237f06dfad6a3b51d5351"},
+ {file = "h5py-3.12.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:050a4f2c9126054515169c49cb900949814987f0c7ae74c341b0c9f9b5056834"},
+ {file = "h5py-3.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c4b41d1019322a5afc5082864dfd6359f8935ecd37c11ac0029be78c5d112c9"},
+ {file = "h5py-3.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:e4d51919110a030913201422fb07987db4338eba5ec8c5a15d6fab8e03d443fc"},
+ {file = "h5py-3.12.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:513171e90ed92236fc2ca363ce7a2fc6f2827375efcbb0cc7fbdd7fe11fecafc"},
+ {file = "h5py-3.12.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:59400f88343b79655a242068a9c900001a34b63e3afb040bd7cdf717e440f653"},
+ {file = "h5py-3.12.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3e465aee0ec353949f0f46bf6c6f9790a2006af896cee7c178a8c3e5090aa32"},
+ {file = "h5py-3.12.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba51c0c5e029bb5420a343586ff79d56e7455d496d18a30309616fdbeed1068f"},
+ {file = "h5py-3.12.1-cp313-cp313-win_amd64.whl", hash = "sha256:52ab036c6c97055b85b2a242cb540ff9590bacfda0c03dd0cf0661b311f522f8"},
+ {file = "h5py-3.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2b8dd64f127d8b324f5d2cd1c0fd6f68af69084e9e47d27efeb9e28e685af3e"},
+ {file = "h5py-3.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4532c7e97fbef3d029735db8b6f5bf01222d9ece41e309b20d63cfaae2fb5c4d"},
+ {file = "h5py-3.12.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fdf6d7936fa824acfa27305fe2d9f39968e539d831c5bae0e0d83ed521ad1ac"},
+ {file = "h5py-3.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84342bffd1f82d4f036433e7039e241a243531a1d3acd7341b35ae58cdab05bf"},
+ {file = "h5py-3.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:62be1fc0ef195891949b2c627ec06bc8e837ff62d5b911b6e42e38e0f20a897d"},
+ {file = "h5py-3.12.1.tar.gz", hash = "sha256:326d70b53d31baa61f00b8aa5f95c2fcb9621a3ee8365d770c551a13dbbcbfdf"},
+]
+
+[package.dependencies]
+numpy = ">=1.19.3"
+
+[[package]]
+name = "hello-robot-stretch-body"
+version = "0.7.27"
+description = "Stretch Body low level Python API"
+optional = true
+python-versions = "*"
+files = [
+ {file = "hello_robot_stretch_body-0.7.27-py3-none-any.whl", hash = "sha256:740e6abae4a0ba43b23ce7831129e3ef9356acd706ea73b5512873b04ba3c5f0"},
+ {file = "hello_robot_stretch_body-0.7.27.tar.gz", hash = "sha256:dd289ea95f9df7be1306cbc26ac75037946db04f4f22503fc6e2741a57c68732"},
+]
+
+[package.dependencies]
+aioserial = "*"
+chime = "*"
+click = "*"
+cma = "*"
+colorama = "*"
+drawnow = "*"
+dynamixel-sdk = "*"
+filelock = "*"
+gitpython = "*"
+hello-robot-stretch-body-tools = ">=0.4.2"
+hello-robot-stretch-factory = ">=0.3.5"
+hello-robot-stretch-tool-share = ">=0.3.3"
+hello-robot-stretch-urdf = ">=0.0.19"
+inputs = "*"
+ipython = "*"
+jupyter = "*"
+matplotlib = "*"
+meshio = "*"
+nose = "*"
+numba = "*"
+numpy = ">=1.24"
+numpy-stl = "*"
+open3d = "*"
+opencv-contrib-python = "*"
+pandas = "*"
+pathlib = "*"
+pixel-ring = "*"
+psutil = "*"
+pyrealsense2 = "*"
+pyrender = "*"
+pyusb = "*"
+pyyaml = ">=5.1"
+renamed-opencv-python-inference-engine = {version = "*", markers = "python_version >= \"3.0.0\""}
+rplidar-roboticia = "*"
+scikit-image = "*"
+scipy = "*"
+snakeviz = "*"
+SpeechRecognition = "*"
+sympy = "*"
+transforms3d = ">=0.4.2"
+urchin = "*"
+urdf-parser-py = "*"
+
+[[package]]
+name = "hello-robot-stretch-body-tools"
+version = "0.7.13"
+description = "Stretch Body Tools"
+optional = true
+python-versions = "*"
+files = [
+ {file = "hello_robot_stretch_body_tools-0.7.13-py3-none-any.whl", hash = "sha256:f12bd4ee40e48c11e68392e7fd91c3a752e87d44d864d1adb3998b30c0166e75"},
+ {file = "hello_robot_stretch_body_tools-0.7.13.tar.gz", hash = "sha256:9ce65bfc9a53444b7622c3479ab45c6aa9369618eb3bf102ef1172474d1873b7"},
+]
+
+[package.dependencies]
+click = "*"
+cma = "*"
+colorama = "*"
+drawnow = "*"
+filelock = "*"
+gitpython = "*"
+inputs = "*"
+ipython = "*"
+matplotlib = "*"
+nose = "*"
+numpy = ">=1.24"
+open3d = "*"
+opencv-contrib-python = "*"
+packaging = "*"
+pandas = "*"
+pixel-ring = "*"
+pyaudio = "*"
+pyrealsense2 = "*"
+pyusb = "*"
+pyyaml = ">=5.1"
+rplidar-roboticia = "*"
+scikit-image = "*"
+scipy = "*"
+sh = "*"
+snakeviz = "*"
+SpeechRecognition = "*"
+sympy = "*"
+trimesh = "4.4.7"
+urchin = "*"
+xmltodict = "*"
+
+[[package]]
+name = "hello-robot-stretch-factory"
+version = "0.5.6"
+description = "Stretch Factory Tools"
+optional = true
+python-versions = "*"
+files = [
+ {file = "hello-robot-stretch-factory-0.5.6.tar.gz", hash = "sha256:e2b060daf5eda699781cde96faf608b7ed3c234ac5b22317f028a69f889846de"},
+ {file = "hello_robot_stretch_factory-0.5.6-py3-none-any.whl", hash = "sha256:09bb97bf1fc146855843af042684d1820d6b1775945dbc3e1cd44eff75be702f"},
+]
+
+[package.dependencies]
+future = "*"
+gitpython = "*"
+hello-robot-stretch-body = ">=0.4.26"
+pyserial = "*"
+python-xlib = "*"
+pyusb = "*"
+tabulate = "*"
+
+[[package]]
+name = "hello-robot-stretch-tool-share"
+version = "0.3.4"
+description = "Stretch end of arm tool interfaces"
+optional = true
+python-versions = "*"
+files = [
+ {file = "hello_robot_stretch_tool_share-0.3.4-py3-none-any.whl", hash = "sha256:230d24f88a84cc983c019078911c579882d9c2c9e24129e5acbe1c756189a1d1"},
+ {file = "hello_robot_stretch_tool_share-0.3.4.tar.gz", hash = "sha256:8e0a2cea088dcb50e41257aade5c6190964a0f1407f1f54f24d114ff31ecb2c6"},
+]
+
+[[package]]
+name = "hello-robot-stretch-urdf"
+version = "0.1.0"
+description = "Stretch URDF"
+optional = true
+python-versions = "*"
+files = [
+ {file = "hello_robot_stretch_urdf-0.1.0-py3-none-any.whl", hash = "sha256:324f5ce0834b45b343e84bb8e8f5cbdd02f1315c6954856f0c68badb2b03e026"},
+ {file = "hello_robot_stretch_urdf-0.1.0.tar.gz", hash = "sha256:51ed5984dbb6538e9f7cdc573b8a4a283118a13faaa06dc773c9bdda8bfe1034"},
+]
+
+[package.dependencies]
+urchin = "*"
[[package]]
name = "hf-transfer"
-version = "0.1.6"
-description = ""
+version = "0.1.8"
+description = "Speed up file transfers with the Hugging Face Hub."
optional = false
python-versions = ">=3.7"
files = [
- {file = "hf_transfer-0.1.6-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6fd3d61f9229d27def007e53540412507b74ac2fdb1a29985ae0b6a5137749a2"},
- {file = "hf_transfer-0.1.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b043bb78df1225de043eb041de9d97783fcca14a0bdc1b1d560fc172fc21b648"},
- {file = "hf_transfer-0.1.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7db60dd18eae4fa6ea157235fb82196cde5313995b396d1b591aad3b790a7f8f"},
- {file = "hf_transfer-0.1.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:30d31dbab9b5a558cce407b8728e39d87d7af1ef8745ddb90187e9ae0b9e1e90"},
- {file = "hf_transfer-0.1.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6b368bddd757efc7af3126ba81f9ac8f9435e2cc00902cb3d64f2be28d8f719"},
- {file = "hf_transfer-0.1.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa2086d8aefaaa3e144e167324574882004c0cec49bf2d0638ec4b74732d8da0"},
- {file = "hf_transfer-0.1.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:45d8985a0940bfe1535cb4ca781f5c11e47c83798ef3373ee1f5d57bbe527a9c"},
- {file = "hf_transfer-0.1.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f42b89735f1cde22f2a795d1f0915741023235666be7de45879e533c7d6010c"},
- {file = "hf_transfer-0.1.6-cp310-none-win32.whl", hash = "sha256:2d2c4c4613f3ad45b6ce6291e347b2d3ba1b86816635681436567e461cb3c961"},
- {file = "hf_transfer-0.1.6-cp310-none-win_amd64.whl", hash = "sha256:78b0eed8d8dce60168a46e584b9742b816af127d7e410a713e12c31249195342"},
- {file = "hf_transfer-0.1.6-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f1d8c172153f9a6cdaecf137612c42796076f61f6bea1072c90ac2e17c1ab6fa"},
- {file = "hf_transfer-0.1.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2c601996351f90c514a75a0eeb02bf700b1ad1db2d946cbfe4b60b79e29f0b2f"},
- {file = "hf_transfer-0.1.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e585c808405557d3f5488f385706abb696997bbae262ea04520757e30836d9d"},
- {file = "hf_transfer-0.1.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ec51af1e8cf4268c268bd88932ade3d7ca895a3c661b42493503f02610ae906b"},
- {file = "hf_transfer-0.1.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d106fdf996332f6df3ed3fab6d6332df82e8c1fb4b20fd81a491ca4d2ab5616a"},
- {file = "hf_transfer-0.1.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e9c2ee9e9fde5a0319cc0e8ddfea10897482bc06d5709b10a238f1bc2ebcbc0b"},
- {file = "hf_transfer-0.1.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f394ea32bc7802b061e549d3133efc523b4ae4fd19bf4b74b183ca6066eef94e"},
- {file = "hf_transfer-0.1.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4282f09902114cd67fca98a1a1bad569a44521a8395fedf327e966714f68b977"},
- {file = "hf_transfer-0.1.6-cp311-none-win32.whl", hash = "sha256:276dbf307d5ab6f1bcbf57b5918bfcf9c59d6848ccb28242349e1bb5985f983b"},
- {file = "hf_transfer-0.1.6-cp311-none-win_amd64.whl", hash = "sha256:fa475175c51451186bea804471995fa8e7b2a48a61dcca55534911dc25955527"},
- {file = "hf_transfer-0.1.6-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:23d157a67acfa00007799323a1c441b2bbacc7dee625b016b7946fe0e25e6c89"},
- {file = "hf_transfer-0.1.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6067342a2864b988f861cd2d31bd78eb1e84d153a3f6df38485b6696d9ad3013"},
- {file = "hf_transfer-0.1.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91cfcb3070e205b58fa8dc8bcb6a62ccc40913fcdb9cd1ff7c364c8e3aa85345"},
- {file = "hf_transfer-0.1.6-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb76064ac5165d5eeaaf8d0903e8bf55477221ecc2a4a4d69f0baca065ab905b"},
- {file = "hf_transfer-0.1.6-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dabd3a177d83028f164984cf4dd859f77ec1e20c97a6f307ff8fcada0785ef1"},
- {file = "hf_transfer-0.1.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0bf4254e44f64a26e0a5b73b5d7e8d91bb36870718fb4f8e126ec943ff4c805"},
- {file = "hf_transfer-0.1.6-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d32c1b106f38f336ceb21531f4db9b57d777b9a33017dafdb6a5316388ebe50"},
- {file = "hf_transfer-0.1.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff05aba3c83921e5c7635ba9f07c693cc893350c447644824043aeac27b285f5"},
- {file = "hf_transfer-0.1.6-cp312-none-win32.whl", hash = "sha256:051ef0c55607652cb5974f59638da035773254b9a07d7ee5b574fe062de4c9d1"},
- {file = "hf_transfer-0.1.6-cp312-none-win_amd64.whl", hash = "sha256:716fb5c574fcbdd8092ce73f9b6c66f42e3544337490f77c60ec07df02bd081b"},
- {file = "hf_transfer-0.1.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c0c981134a55965e279cb7be778c1ccaf93f902fc9ebe31da4f30caf824cc4d"},
- {file = "hf_transfer-0.1.6-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ef1f145f04c5b573915bcb1eb5db4039c74f6b46fce73fc473c4287e613b623"},
- {file = "hf_transfer-0.1.6-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d0a7609b004db3347dbb7796df45403eceb171238210d054d93897d6d84c63a4"},
- {file = "hf_transfer-0.1.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60f0864bf5996773dbd5f8ae4d1649041f773fe9d5769f4c0eeb5553100acef3"},
- {file = "hf_transfer-0.1.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d01e55d630ffe70a4f5d0ed576a04c6a48d7c65ca9a7d18f2fca385f20685a9"},
- {file = "hf_transfer-0.1.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d855946c5062b665190de15b2bdbd4c8eddfee35350bfb7564592e23d36fbbd3"},
- {file = "hf_transfer-0.1.6-cp37-none-win32.whl", hash = "sha256:fd40b2409cfaf3e8aba20169ee09552f69140e029adeec261b988903ff0c8f6f"},
- {file = "hf_transfer-0.1.6-cp37-none-win_amd64.whl", hash = "sha256:0e0eba49d46d3b5481919aea0794aec625fbc6ecdf13fe7e0e9f3fc5d5ad5971"},
- {file = "hf_transfer-0.1.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7e669fecb29fc454449739f9f53ed9253197e7c19e6a6eaa0f08334207af4287"},
- {file = "hf_transfer-0.1.6-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:89f701802892e5eb84f89f402686861f87dc227d6082b05f4e9d9b4e8015a3c3"},
- {file = "hf_transfer-0.1.6-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6f2b0c8b95b01409275d789a9b74d5f2e146346f985d384bf50ec727caf1ccc"},
- {file = "hf_transfer-0.1.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa855a2fa262792a230f9efcdb5da6d431b747d1861d2a69fe7834b19aea077e"},
- {file = "hf_transfer-0.1.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa8ca349afb2f0713475426946261eb2035e4efb50ebd2c1d5ad04f395f4217"},
- {file = "hf_transfer-0.1.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01255f043996bc7d1bae62d8afc5033a90c7e36ce308b988eeb84afe0a69562f"},
- {file = "hf_transfer-0.1.6-cp38-none-win32.whl", hash = "sha256:60b1db183e8a7540cd4f8b2160ff4de55f77cb0c3fc6a10be1e7c30eb1b2bdeb"},
- {file = "hf_transfer-0.1.6-cp38-none-win_amd64.whl", hash = "sha256:fb8be3cba6aaa50ab2e9dffbd25c8eb2046785eeff642cf0cdd0dd9ae6be3539"},
- {file = "hf_transfer-0.1.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d09af35e3e3f09b664e6429e9a0dc200f29c5bdfd88bdd9666de51183b1fe202"},
- {file = "hf_transfer-0.1.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a4505bd707cc14d85c800f961fad8ca76f804a8ad22fbb7b1a217d8d0c15e6a5"},
- {file = "hf_transfer-0.1.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c453fd8b0be9740faa23cecd1f28ee9ead7d900cefa64ff836960c503a744c9"},
- {file = "hf_transfer-0.1.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13cb8884e718a78c3b81a8cdec9c7ac196dd42961fce55c3ccff3dd783e5ad7a"},
- {file = "hf_transfer-0.1.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39cd39df171a2b5404de69c4e6cd14eee47f6fe91c1692f939bfb9e59a0110d8"},
- {file = "hf_transfer-0.1.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ff0629ee9f98df57a783599602eb498f9ec3619dc69348b12e4d9d754abf0e9"},
- {file = "hf_transfer-0.1.6-cp39-none-win32.whl", hash = "sha256:164a6ce445eb0cc7c645f5b6e1042c003d33292520c90052b6325f30c98e4c5f"},
- {file = "hf_transfer-0.1.6-cp39-none-win_amd64.whl", hash = "sha256:11b8b4b73bf455f13218c5f827698a30ae10998ca31b8264b51052868c7a9f11"},
- {file = "hf_transfer-0.1.6-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16957ba057376a99ea361074ce1094f61b58e769defa6be2422ae59c0b6a6530"},
- {file = "hf_transfer-0.1.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7db952112e3b8ee1a5cbf500d2443e9ce4fb893281c5310a3e31469898628005"},
- {file = "hf_transfer-0.1.6-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d39d826a7344f5e39f438d62632acd00467aa54a083b66496f61ef67a9885a56"},
- {file = "hf_transfer-0.1.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a4e2653fbfa92e7651db73d99b697c8684e7345c479bd6857da80bed6138abb2"},
- {file = "hf_transfer-0.1.6-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:144277e6a86add10b90ec3b583253aec777130312256bfc8d5ade5377e253807"},
- {file = "hf_transfer-0.1.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3bb53bcd16365313b2aa0dbdc28206f577d70770f31249cdabc387ac5841edcc"},
- {file = "hf_transfer-0.1.6-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:990d73a5a68d8261980f146c51f4c5f9995314011cb225222021ad7c39f3af2d"},
- {file = "hf_transfer-0.1.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:652406037029ab9b4097b4c5f29321bad5f64c2b46fbff142509d918aec87c29"},
- {file = "hf_transfer-0.1.6.tar.gz", hash = "sha256:deb505a7d417d7055fd7b3549eadb91dfe782941261f3344025c486c16d1d2f9"},
+ {file = "hf_transfer-0.1.8-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:70858f9e94286738ed300484a45beb5cfee6a7ddac4c5886f9c6fce7823ac5ab"},
+ {file = "hf_transfer-0.1.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:38adc73f0a8526319d90f7cc5dc2d5e4bb66f487a513d94b98aa6725be732e4a"},
+ {file = "hf_transfer-0.1.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d2f0c08198d8d899fe9d66e86aee2dd844bd7ce33888f261373fcec81d2a54"},
+ {file = "hf_transfer-0.1.8-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1de2a4ef36f9e60b3d3bec00193c0aafd75771709f2ca51b9b162373f5af3d32"},
+ {file = "hf_transfer-0.1.8-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e319269e3606a5ff2979296841766649ac73598a4a8eee2a968f86c8071fea5a"},
+ {file = "hf_transfer-0.1.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f6026cf3be6a53ea42f92172f60c1c0675baaa9073f865e671b661dde5fd157"},
+ {file = "hf_transfer-0.1.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f865c33ada5bd3650c2b46e59979f2d7755c3f517f8d0facc78576a0c7d26406"},
+ {file = "hf_transfer-0.1.8-cp310-none-win32.whl", hash = "sha256:2054730e8d8ed21917c64be7199e06424b2bd08df1c43a72766afaed7992f2d3"},
+ {file = "hf_transfer-0.1.8-cp310-none-win_amd64.whl", hash = "sha256:2b4f1a9446ba31170b5b1eca4e916504d18378a6b5fe959896bdac8a736a5ecb"},
+ {file = "hf_transfer-0.1.8-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e27c15fcc5869ad7e52bbc0bdec6106b288d1c463f8d2da92f28615a3b181361"},
+ {file = "hf_transfer-0.1.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:871a0032d011ebc6409a73a8406b98b84ff2cd3ed7d9e1af8cdf4d660b9fab9b"},
+ {file = "hf_transfer-0.1.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:686fa756e1e0214bb6327d33c66732c52274d94a8460beb50604ad988b391cf6"},
+ {file = "hf_transfer-0.1.8-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:36a03b1b2911b0cf15b1b9d971a34b32dadcc4f2fd979aaff5979d6ce4017c34"},
+ {file = "hf_transfer-0.1.8-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:079db90c81f41f4cf3227dfaaa855a9b8e9aef45bc7c2be29ce7232cd83ff881"},
+ {file = "hf_transfer-0.1.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac08a4524127fdd14c234d4bcbe49d1c498acf5335c781714823179bcc8dc039"},
+ {file = "hf_transfer-0.1.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:837432e73cb17274a6782b6216e8ce058aa325a475dc44a5a6a753d48b86d18a"},
+ {file = "hf_transfer-0.1.8-cp311-none-win32.whl", hash = "sha256:b180f9823dde35aba9bc0f1d0c04ac8a873baebd3732a7ffe4f11940abc7df0d"},
+ {file = "hf_transfer-0.1.8-cp311-none-win_amd64.whl", hash = "sha256:37907d2135cebcf8b6d419bb575148d89c224f16b69357f027bd29d0e85c6529"},
+ {file = "hf_transfer-0.1.8-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:baf948f4f493949309cbe60529620b9b0aef854a22b6e526753364acc57c09b6"},
+ {file = "hf_transfer-0.1.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bce5c8bdefa478c5d5eaa646cc4ce1df5cfe764d98572ad0c6b8773e98d49f6"},
+ {file = "hf_transfer-0.1.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54d6f8a1a86128d651a3799e1267c343d60f81f2c565d7c5416eb8e674e4cf0e"},
+ {file = "hf_transfer-0.1.8-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f79fd1b0c2ed93efb4c5f684118d7a762ecdd218e170df8208c4e13d3dcd4959"},
+ {file = "hf_transfer-0.1.8-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:414df35692670683bf5623498ef9d88a8df5d77e9516515da6e2b34d1054c11f"},
+ {file = "hf_transfer-0.1.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c9798d5f951f66b96d40a7a53910260cb5874fda56cf5944dddb7c571f37ec3"},
+ {file = "hf_transfer-0.1.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:060c661691f85a61392e57579c80eb64b5ee277434e81fb582f605c1c8ff05d5"},
+ {file = "hf_transfer-0.1.8-cp312-none-win32.whl", hash = "sha256:f7840e32379820c3e1571a480238e05ea043e970c99d2e999578004a2eb17788"},
+ {file = "hf_transfer-0.1.8-cp312-none-win_amd64.whl", hash = "sha256:9a3204ec423cc5e659872e8179f8704ad9ce2abb1e6a991f8838aedf1dc07830"},
+ {file = "hf_transfer-0.1.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09949e86ad63ee139e463fd0dfaf401515ae70445854199f61d545514c65f744"},
+ {file = "hf_transfer-0.1.8-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf1a74552845b93ea972e6e7131ef54e56056aa54137e93a40faf3fbcb2442ff"},
+ {file = "hf_transfer-0.1.8-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959bcb3afb4ee6f2a07031a947dba98ec0b64c001bc914fbd8fc32e13a287162"},
+ {file = "hf_transfer-0.1.8-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e01eecdb8162bd61dab9090fbd9f8034dd8b5755ef727a21ca8a057f80cb91ee"},
+ {file = "hf_transfer-0.1.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50650a38e9d31f5ad8f010e4598bf304ecd99c17162e7d93f67e031571b864ee"},
+ {file = "hf_transfer-0.1.8-cp37-none-win32.whl", hash = "sha256:e29b9d1d378138f2f4eae0e93ca94af3b5d45f4532eef69f1ab97fe06f9c9d9e"},
+ {file = "hf_transfer-0.1.8-cp37-none-win_amd64.whl", hash = "sha256:cfd6cef43ae883103117a371f8ebae4e7f9637bc6fb480f1be5568e2fe22a8a7"},
+ {file = "hf_transfer-0.1.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92a68f7a0043cca8a0de4decc760dca177530944cbab502afac503bd1b2fa01a"},
+ {file = "hf_transfer-0.1.8-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e3138e408179f80a5480598e32f8e1abb564915cbde4d3bc8da52811c75dc3ea"},
+ {file = "hf_transfer-0.1.8-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4544d148930ad34442d43b8fa911c8479c04a95b858b1d1f91e0b7da77082fad"},
+ {file = "hf_transfer-0.1.8-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a851794b9f029965664f8c3002c957fccf21685e9397ceb4f9f19c986dee8ad3"},
+ {file = "hf_transfer-0.1.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:791aaf87c5319ac83edb6ab2994b3db19924c49d6ff667dd3d8a610b455ff70a"},
+ {file = "hf_transfer-0.1.8-cp38-none-win32.whl", hash = "sha256:8f71e5d35d3a3160dcca12fdcc8119033aeacaa6a32838a7ad9f9cb1008bbe58"},
+ {file = "hf_transfer-0.1.8-cp38-none-win_amd64.whl", hash = "sha256:543287b4ceb1e25501580b99690f7f0df9d3631d29306f37cbd97e918c732944"},
+ {file = "hf_transfer-0.1.8-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:7ce02a18bd0bb2343e707ac85b68c946bc37623ee24150c69158f6b2b2c7a98f"},
+ {file = "hf_transfer-0.1.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:64d7f8dbd64ba183ed1df75d47c84e075ff666ceaa335bff1de16b09eaac5b80"},
+ {file = "hf_transfer-0.1.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e7858694e11419ae27e542fb8fc0d0e54d46ff7768fe73bc359d70b8f5aa578"},
+ {file = "hf_transfer-0.1.8-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed116cd9d1edfa32c0136d7cb8e5f1afd2b32df43c49085d428f108fc8e1c8f"},
+ {file = "hf_transfer-0.1.8-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e385d0da9c6b3472ab29285d2d46c9f9903205b8d108f88a82f3f85aafae0ab"},
+ {file = "hf_transfer-0.1.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98f75fa4b86ef15433cd907807ac77d1fb39d7e7b790bfd39c7ae9c385bf0200"},
+ {file = "hf_transfer-0.1.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a63ad947d2901425ac0a3ed70c3696dfde27fadb0482ed763bdd5cc946b278"},
+ {file = "hf_transfer-0.1.8-cp39-none-win32.whl", hash = "sha256:3e74096915813ae842ea6a5bdf10c0fef960aa51a35a560955b3e61cdfe3db57"},
+ {file = "hf_transfer-0.1.8-cp39-none-win_amd64.whl", hash = "sha256:05ea16307bf4a5eb097cbc6e5057e4eb5e080a138af23ef639fd38857723c288"},
+ {file = "hf_transfer-0.1.8-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:928ff036c3e98e10dcfbdb4fcdfc4592d37a5cc8e365a7ba8dfd4337e849d675"},
+ {file = "hf_transfer-0.1.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d49ba3ce67035f460ae1924fe2feafec155cb535eec7f31ed5109c19064cd294"},
+ {file = "hf_transfer-0.1.8-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b01f5872c62cfee3ec9ca5c738818296f69f8adf84b4d8d15f2a5601d9dda339"},
+ {file = "hf_transfer-0.1.8-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:659d4212d50847a5165666bf43d67727679b4f694ef9c413613cc27093136527"},
+ {file = "hf_transfer-0.1.8.tar.gz", hash = "sha256:26d229468152e7a3ec12664cac86b8c2800695fd85f9c9a96677a775cc04f0b3"},
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.6"
+description = "A minimal low-level HTTP client."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"},
+ {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"},
+]
+
+[package.dependencies]
+certifi = "*"
+h11 = ">=0.13,<0.15"
+
+[package.extras]
+asyncio = ["anyio (>=4.0,<5.0)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+trio = ["trio (>=0.22.0,<1.0)"]
+
+[[package]]
+name = "httpx"
+version = "0.27.2"
+description = "The next generation HTTP client."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"},
+ {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"},
]
+[package.dependencies]
+anyio = "*"
+certifi = "*"
+httpcore = "==1.*"
+idna = "*"
+sniffio = "*"
+
+[package.extras]
+brotli = ["brotli", "brotlicffi"]
+cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+zstd = ["zstandard (>=0.18.0)"]
+
[[package]]
name = "huggingface-hub"
-version = "0.25.0"
+version = "0.25.2"
description = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
optional = false
python-versions = ">=3.8.0"
files = [
- {file = "huggingface_hub-0.25.0-py3-none-any.whl", hash = "sha256:e2f357b35d72d5012cfd127108c4e14abcd61ba4ebc90a5a374dc2456cb34e12"},
- {file = "huggingface_hub-0.25.0.tar.gz", hash = "sha256:fb5fbe6c12fcd99d187ec7db95db9110fb1a20505f23040a5449a717c1a0db4d"},
+ {file = "huggingface_hub-0.25.2-py3-none-any.whl", hash = "sha256:1897caf88ce7f97fe0110603d8f66ac264e3ba6accdf30cd66cc0fed5282ad25"},
+ {file = "huggingface_hub-0.25.2.tar.gz", hash = "sha256:a1014ea111a5f40ccd23f7f7ba8ac46e20fa3b658ced1f86a00c75c06ec6423c"},
]
[package.dependencies]
@@ -1412,13 +2270,13 @@ packaging = "*"
[[package]]
name = "identify"
-version = "2.6.0"
+version = "2.6.1"
description = "File identification library for Python"
optional = true
python-versions = ">=3.8"
files = [
- {file = "identify-2.6.0-py2.py3-none-any.whl", hash = "sha256:e79ae4406387a9d300332b5fd366d8994f1525e8414984e1a59e058b2eda2dd0"},
- {file = "identify-2.6.0.tar.gz", hash = "sha256:cb171c685bdc31bcc4c1734698736a7d5b6c8bf2e0c15117f4d469c8640ae5cf"},
+ {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"},
+ {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"},
]
[package.extras]
@@ -1426,50 +2284,60 @@ license = ["ukkonen"]
[[package]]
name = "idna"
-version = "3.7"
+version = "3.10"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.6"
files = [
- {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
- {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
+ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
+ {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
]
+[package.extras]
+all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
+
[[package]]
name = "imagecodecs"
-version = "2024.6.1"
+version = "2024.9.22"
description = "Image transformation, compression, and decompression codecs"
optional = true
python-versions = ">=3.9"
files = [
- {file = "imagecodecs-2024.6.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:369816eaddfe6e9d8d1faa6794341c89f3494fef846c9b5d834f77dc7583bfdf"},
- {file = "imagecodecs-2024.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9300198b5e4ec09f94f5d6bdd6f727d02bbccba5ed0c974e9931d3f9d5d7fa35"},
- {file = "imagecodecs-2024.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42eafc88b3dd44cfb7a8b076ff64ff794874e88d45c3691b32e9e93fbc42e86e"},
- {file = "imagecodecs-2024.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f428f141ed102cecf98ffbe61ed02b1f3f7d65db0bf1459cf2e7b1a53fb279b2"},
- {file = "imagecodecs-2024.6.1-cp310-cp310-win32.whl", hash = "sha256:5526a7e41939613a5c68403911f7cc738f6fe1a8ac0456535720f53253497b76"},
- {file = "imagecodecs-2024.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:8d3d82ebe83a3e31ab7f09a8b72155c5436a3f87145052ca57dc5caf3a2dc9c0"},
- {file = "imagecodecs-2024.6.1-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:328ea133e0f292cf54c5feb13e247fbf45a6055c8dc6822e841c208d2dc5c96a"},
- {file = "imagecodecs-2024.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8045ea3a9c9de78ea00e2a387f47d784434bfad05967decbe0c1b3bee5aadf25"},
- {file = "imagecodecs-2024.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42bd9ec14e4d38f15e2fa387c90b726dba42c16da0a9b6ff2c23e01478b8cd93"},
- {file = "imagecodecs-2024.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eb5b2d755a64de9a7e0604d5dcc1151c96b43b4e5ac69bebc6d8d790b77ca58"},
- {file = "imagecodecs-2024.6.1-cp311-cp311-win32.whl", hash = "sha256:03ace438a843e024239cddbe7fe6940bd2a6cf3316b08c281b95842b5217c0f7"},
- {file = "imagecodecs-2024.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:cd926589c6e3c564490b93258b1a2ca3b040da10c21e99b618b7be6dd76b2a25"},
- {file = "imagecodecs-2024.6.1-cp311-cp311-win_arm64.whl", hash = "sha256:101fcef57aedb8730d1d2d1779dfbaa23daf7e50cd4130e88945a4fe34d0212f"},
- {file = "imagecodecs-2024.6.1-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:3c5e0ebdf7e1f8ec23a6d3c4b06fc7a64f41ec47ba23516458c5a763685f29e3"},
- {file = "imagecodecs-2024.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:407d2859d62ed5834e69e74d9ebcbc2d30be71e4f1ee14fae37f1179110fec8c"},
- {file = "imagecodecs-2024.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066aab64446a1d87271c8036a3d9f03dfac8678993e4e1e97923acd0d10f355"},
- {file = "imagecodecs-2024.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3467f4929fe0159c91e59a1f186a656e3aa3ad330079ab3af3d7edff7603b82"},
- {file = "imagecodecs-2024.6.1-cp312-cp312-win32.whl", hash = "sha256:a9b4939934bde291f5b107fcc01dbd6d4b4307eb36915c880600592839cab682"},
- {file = "imagecodecs-2024.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:35ddab6947bcf4c04bc0e5d171769c40ffdea07eb908e62de53d2dde3985d59d"},
- {file = "imagecodecs-2024.6.1-cp312-cp312-win_arm64.whl", hash = "sha256:0bd70e34ff9b14ea299c1cdc51db4a80c2c406ae2f422e5e400716e8df791bdc"},
- {file = "imagecodecs-2024.6.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:3d39699ddf13d8ce67b6a1a04e92a9c318613c0eecc245861590fed78f09e2a1"},
- {file = "imagecodecs-2024.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:421be324c4d0578ae1be6b75ebddf1cbe4e8092a83d31d2a8fa8021bc75e12d2"},
- {file = "imagecodecs-2024.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ababc300ec18c28e8fd515ad92252679742c243e88cdb7c5e217c72eaed8fa3a"},
- {file = "imagecodecs-2024.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc50b613738a3b62aa77c00d457a9e9a6337e9fb7a2b8163d99a368d4a346f8d"},
- {file = "imagecodecs-2024.6.1-cp39-cp39-win32.whl", hash = "sha256:38c6a929ca5356ab9ffdd4aa6dcae5156a7265f886b662bd8cfb0bca1e3d6bee"},
- {file = "imagecodecs-2024.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:01e41f59ebb7b09dc965cafd264cab0ee303d3cef981ecceb85bb556b933a8f3"},
- {file = "imagecodecs-2024.6.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:20d66ea962e9e6ea690a89f56ff9da7831378990d705c1fb133cddac2f2c507a"},
- {file = "imagecodecs-2024.6.1.tar.gz", hash = "sha256:0f3e94b7f51e2f78287b7ffae82cd850b1007639148894538274fa50bd179886"},
+ {file = "imagecodecs-2024.9.22-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:4cc21a59c6eb409bc3930dc642039eb1ff67a36b3f8d9e8c229eaede6b26557e"},
+ {file = "imagecodecs-2024.9.22-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:321ff2e6907820bdbf8350d20733f5068bf53513476d522028117aefab55fc03"},
+ {file = "imagecodecs-2024.9.22-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1608015c1e182e103d8b2ecda4a0e54595c3f846ca76fa484302283f24f3e7f"},
+ {file = "imagecodecs-2024.9.22-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432e518d74ee5b9ac7d5b1022ed29a9fdabd0eab18201220e742fde631962cf8"},
+ {file = "imagecodecs-2024.9.22-cp310-cp310-win32.whl", hash = "sha256:50d14caef565ccb4bdeb60e045b61f5d899d3caaf18e980923cdb50a181e4db2"},
+ {file = "imagecodecs-2024.9.22-cp310-cp310-win_amd64.whl", hash = "sha256:d7220e9134c3abda5e9f720dcd810031b01b8ba1a71faa8055ab6b43b5056109"},
+ {file = "imagecodecs-2024.9.22-cp311-cp311-macosx_10_14_x86_64.whl", hash = "sha256:47259f811aea089d7cdf369e6617cb336b67359835102a45ee2a49f2a8e20624"},
+ {file = "imagecodecs-2024.9.22-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:52007be4bc809104e5660805725196255cc091c248e465f588f9b4506544b886"},
+ {file = "imagecodecs-2024.9.22-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db9bcb5abd23522b119f619810cfa0217bf4756d1b8c1146a6a81635d7fb98d1"},
+ {file = "imagecodecs-2024.9.22-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:606f3c31387aa9019007cdf7e5e3fcfc4d04fc158f56a8e94340018988f5af69"},
+ {file = "imagecodecs-2024.9.22-cp311-cp311-win32.whl", hash = "sha256:180295983edbdd1220099ebe33718876d6cea6c68d9442a3771bba91de0be8c7"},
+ {file = "imagecodecs-2024.9.22-cp311-cp311-win_amd64.whl", hash = "sha256:915397c69f986da92608ec4af331b9682ad933f3d645a4e9f7b106530e57683c"},
+ {file = "imagecodecs-2024.9.22-cp311-cp311-win_arm64.whl", hash = "sha256:15e7b21488d50f95980b1f865983a6963dad1c752d51cef5bfa76bdd1a325935"},
+ {file = "imagecodecs-2024.9.22-cp312-cp312-macosx_10_14_x86_64.whl", hash = "sha256:ba7e98ad714100ae892aeadea5dd636e31eb95663f7e71fb3654fc3399f8a312"},
+ {file = "imagecodecs-2024.9.22-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d1b59ffeaf1fdc06c5da1b8faf34a5f74f914c55a7148060b1746f7684552b6f"},
+ {file = "imagecodecs-2024.9.22-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9646cd9e8933c9a181387b159392d57832fb4f4b444f2d475a6ef7ba0ea8ef8"},
+ {file = "imagecodecs-2024.9.22-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd9c62286c5aa9cdd73551c7e55c7db04424968304e53ec9240915edb9f30e23"},
+ {file = "imagecodecs-2024.9.22-cp312-cp312-win32.whl", hash = "sha256:15959cf31ea8070741318fd0d5748b734e9001b83afd8bab6fe15236c27acba0"},
+ {file = "imagecodecs-2024.9.22-cp312-cp312-win_amd64.whl", hash = "sha256:44d51f5aae669fe1eba1474144c042fbb56f4286c072f37aa86941fed865270a"},
+ {file = "imagecodecs-2024.9.22-cp312-cp312-win_arm64.whl", hash = "sha256:aa5f47ebef13f4c55b1ac24fafef5e7b340963a6a73af9d2cef2f9bfdf58bf97"},
+ {file = "imagecodecs-2024.9.22-cp313-cp313-macosx_10_14_x86_64.whl", hash = "sha256:d4bd89bc86c74439a7a828ce62e28d575db125f25cadc31bd877e2616ace2f0d"},
+ {file = "imagecodecs-2024.9.22-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c8c37f8cdeedd0e01f55b9588e82b2c7059bc1a0167ed8dd05166cad674bfbde"},
+ {file = "imagecodecs-2024.9.22-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9752c9af72ba372bbb0afca8a94f76b3096c1c54dcdb5cf18156fdc6b73403d2"},
+ {file = "imagecodecs-2024.9.22-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d9ddd053c7f262ca1333fc23f45ece7b375ddca31a0761c46e1197691e895bc3"},
+ {file = "imagecodecs-2024.9.22-cp313-cp313-win32.whl", hash = "sha256:a5dc99af846febbaaf328f03518c2e2b0d0dfbe0a1a7b781361550605c7d4c58"},
+ {file = "imagecodecs-2024.9.22-cp313-cp313-win_amd64.whl", hash = "sha256:c8951d3449f81aaf0664a8f575d431906134973f9bec93073dfc8d8247db0a1a"},
+ {file = "imagecodecs-2024.9.22-cp313-cp313-win_arm64.whl", hash = "sha256:ead06b23300b9f1958026d103aafe8eba272ff40abcb8c5db02d7711a5992cc9"},
+ {file = "imagecodecs-2024.9.22-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:fa72958dee65ce40e25f9536408b04f72a95004fe4630faa7042cf6c6c29a1d1"},
+ {file = "imagecodecs-2024.9.22-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4518e0edb5b369415bb7016097ff9cd1b2aed7a9960e21d2e616cf7e066af3fe"},
+ {file = "imagecodecs-2024.9.22-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fbbe6f5929838adc954acdd51820602d1dfd8235f8b3eb3764be58e76c6626b7"},
+ {file = "imagecodecs-2024.9.22-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58dbee11a50f2bc2e8c81f3bc1887f1b1328d61f09d9d8caa2e4050ae635fbe9"},
+ {file = "imagecodecs-2024.9.22-cp39-cp39-win32.whl", hash = "sha256:fcbbba54d0d61b6ca188d28695b244c4c5a9caaf848173015d81c91d3c0d47cb"},
+ {file = "imagecodecs-2024.9.22-cp39-cp39-win_amd64.whl", hash = "sha256:3e55abc2934442fe3055b4f8943ebe8ff6c7eb57f9f895c80ca1732f38632d9f"},
+ {file = "imagecodecs-2024.9.22-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ec3ce35e6131853beb8a39e47e59b183d034c6e9476fafda38c7ab4d8d17e1f4"},
+ {file = "imagecodecs-2024.9.22.tar.gz", hash = "sha256:fea0801b4008d25e971918d991397a351bbe76276cfa98eed2de54cb87e894a3"},
]
[package.dependencies]
@@ -1477,16 +2345,17 @@ numpy = "*"
[package.extras]
all = ["matplotlib", "numcodecs", "tifffile"]
+test = ["bitshuffle", "blosc", "blosc2", "czifile", "lz4", "numcodecs", "pyliblzfse", "pytest", "python-lzf", "python-snappy", "tifffile", "zarr (<3)", "zopflipy", "zstd"]
[[package]]
name = "imageio"
-version = "2.34.2"
+version = "2.35.1"
description = "Library for reading and writing a wide range of image, video, scientific, and volumetric data formats."
optional = false
python-versions = ">=3.8"
files = [
- {file = "imageio-2.34.2-py3-none-any.whl", hash = "sha256:a0bb27ec9d5bab36a9f4835e51b21d2cb099e1f78451441f94687ff3404b79f8"},
- {file = "imageio-2.34.2.tar.gz", hash = "sha256:5c0c0ee8faa018a1c42f649b90395dd4d3bb6187c09053a0cd6f1fdd51bbff5e"},
+ {file = "imageio-2.35.1-py3-none-any.whl", hash = "sha256:6eb2e5244e7a16b85c10b5c2fe0f7bf961b40fcb9f1a9fd1bd1d2c2f8fb3cd65"},
+ {file = "imageio-2.35.1.tar.gz", hash = "sha256:4952dfeef3c3947957f6d5dedb1f4ca31c6e509a476891062396834048aeed2a"},
]
[package.dependencies]
@@ -1496,19 +2365,20 @@ pillow = ">=8.3.2"
psutil = {version = "*", optional = true, markers = "extra == \"ffmpeg\""}
[package.extras]
-all-plugins = ["astropy", "av", "imageio-ffmpeg", "pillow-heif", "psutil", "tifffile"]
-all-plugins-pypy = ["av", "imageio-ffmpeg", "pillow-heif", "psutil", "tifffile"]
+all-plugins = ["astropy", "av", "imageio-ffmpeg", "psutil", "tifffile"]
+all-plugins-pypy = ["av", "imageio-ffmpeg", "psutil", "tifffile"]
build = ["wheel"]
dev = ["black", "flake8", "fsspec[github]", "pytest", "pytest-cov"]
docs = ["numpydoc", "pydata-sphinx-theme", "sphinx (<6)"]
ffmpeg = ["imageio-ffmpeg", "psutil"]
fits = ["astropy"]
-full = ["astropy", "av", "black", "flake8", "fsspec[github]", "gdal", "imageio-ffmpeg", "itk", "numpydoc", "pillow-heif", "psutil", "pydata-sphinx-theme", "pytest", "pytest-cov", "sphinx (<6)", "tifffile", "wheel"]
+full = ["astropy", "av", "black", "flake8", "fsspec[github]", "gdal", "imageio-ffmpeg", "itk", "numpy (>2)", "numpydoc", "pillow-heif", "psutil", "pydata-sphinx-theme", "pytest", "pytest-cov", "rawpy", "sphinx (<6)", "tifffile", "wheel"]
gdal = ["gdal"]
itk = ["itk"]
linting = ["black", "flake8"]
pillow-heif = ["pillow-heif"]
pyav = ["av"]
+rawpy = ["numpy (>2)", "rawpy"]
test = ["fsspec[github]", "pytest", "pytest-cov"]
tifffile = ["tifffile"]
@@ -1532,22 +2402,26 @@ setuptools = "*"
[[package]]
name = "importlib-metadata"
-version = "8.0.0"
+version = "8.5.0"
description = "Read metadata from Python packages"
optional = false
python-versions = ">=3.8"
files = [
- {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"},
- {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"},
+ {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"},
+ {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"},
]
[package.dependencies]
-zipp = ">=0.5"
+zipp = ">=3.20"
[package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+enabler = ["pytest-enabler (>=2.2)"]
perf = ["ipython"]
-test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"]
+test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"]
+type = ["pytest-mypy"]
[[package]]
name = "iniconfig"
@@ -1560,6 +2434,17 @@ files = [
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
+[[package]]
+name = "inputs"
+version = "0.5"
+description = "Cross-platform Python support for keyboards, mice and gamepads."
+optional = true
+python-versions = "*"
+files = [
+ {file = "inputs-0.5-py2.py3-none-any.whl", hash = "sha256:13f894564e52134cf1e3862b1811da034875eb1f2b62e6021e3776e9669a96ec"},
+ {file = "inputs-0.5.tar.gz", hash = "sha256:a31d5b96a3525f1232f326be9e7ce8ccaf873c6b1fb84d9f3c9bc3d79b23eae4"},
+]
+
[[package]]
name = "inquirerpy"
version = "0.3.4"
@@ -1579,19 +2464,110 @@ prompt-toolkit = ">=3.0.1,<4.0.0"
docs = ["Sphinx (>=4.1.2,<5.0.0)", "furo (>=2021.8.17-beta.43,<2022.0.0)", "myst-parser (>=0.15.1,<0.16.0)", "sphinx-autobuild (>=2021.3.14,<2022.0.0)", "sphinx-copybutton (>=0.4.0,<0.5.0)"]
[[package]]
-name = "intel-openmp"
-version = "2021.4.0"
-description = "Intel OpenMP* Runtime Library"
-optional = false
-python-versions = "*"
+name = "ipykernel"
+version = "6.29.5"
+description = "IPython Kernel for Jupyter"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"},
+ {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"},
+]
+
+[package.dependencies]
+appnope = {version = "*", markers = "platform_system == \"Darwin\""}
+comm = ">=0.1.1"
+debugpy = ">=1.6.5"
+ipython = ">=7.23.1"
+jupyter-client = ">=6.1.12"
+jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0"
+matplotlib-inline = ">=0.1"
+nest-asyncio = "*"
+packaging = "*"
+psutil = "*"
+pyzmq = ">=24"
+tornado = ">=6.1"
+traitlets = ">=5.4.0"
+
+[package.extras]
+cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"]
+docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"]
+pyqt5 = ["pyqt5"]
+pyside6 = ["pyside6"]
+test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.23.5)", "pytest-cov", "pytest-timeout"]
+
+[[package]]
+name = "ipython"
+version = "8.28.0"
+description = "IPython: Productive Interactive Computing"
+optional = true
+python-versions = ">=3.10"
+files = [
+ {file = "ipython-8.28.0-py3-none-any.whl", hash = "sha256:530ef1e7bb693724d3cdc37287c80b07ad9b25986c007a53aa1857272dac3f35"},
+ {file = "ipython-8.28.0.tar.gz", hash = "sha256:0d0d15ca1e01faeb868ef56bc7ee5a0de5bd66885735682e8a322ae289a13d1a"},
+]
+
+[package.dependencies]
+decorator = "*"
+exceptiongroup = {version = "*", markers = "python_version < \"3.11\""}
+jedi = ">=0.16"
+matplotlib-inline = "*"
+pexpect = {version = ">4.3", markers = "sys_platform != \"win32\" and sys_platform != \"emscripten\""}
+prompt-toolkit = ">=3.0.41,<3.1.0"
+pygments = ">=2.4.0"
+stack-data = "*"
+traitlets = ">=5.13.0"
+typing-extensions = {version = ">=4.6", markers = "python_version < \"3.12\""}
+
+[package.extras]
+all = ["ipython[black,doc,kernel,matplotlib,nbconvert,nbformat,notebook,parallel,qtconsole]", "ipython[test,test-extra]"]
+black = ["black"]
+doc = ["docrepr", "exceptiongroup", "intersphinx-registry", "ipykernel", "ipython[test]", "matplotlib", "setuptools (>=18.5)", "sphinx (>=1.3)", "sphinx-rtd-theme", "sphinxcontrib-jquery", "tomli", "typing-extensions"]
+kernel = ["ipykernel"]
+matplotlib = ["matplotlib"]
+nbconvert = ["nbconvert"]
+nbformat = ["nbformat"]
+notebook = ["ipywidgets", "notebook"]
+parallel = ["ipyparallel"]
+qtconsole = ["qtconsole"]
+test = ["packaging", "pickleshare", "pytest", "pytest-asyncio (<0.22)", "testpath"]
+test-extra = ["curio", "ipython[test]", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.23)", "pandas", "trio"]
+
+[[package]]
+name = "ipywidgets"
+version = "8.1.5"
+description = "Jupyter interactive widgets"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "ipywidgets-8.1.5-py3-none-any.whl", hash = "sha256:3290f526f87ae6e77655555baba4f36681c555b8bdbbff430b70e52c34c86245"},
+ {file = "ipywidgets-8.1.5.tar.gz", hash = "sha256:870e43b1a35656a80c18c9503bbf2d16802db1cb487eec6fab27d683381dde17"},
+]
+
+[package.dependencies]
+comm = ">=0.1.3"
+ipython = ">=6.1.0"
+jupyterlab-widgets = ">=3.0.12,<3.1.0"
+traitlets = ">=4.3.1"
+widgetsnbextension = ">=4.0.12,<4.1.0"
+
+[package.extras]
+test = ["ipykernel", "jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"]
+
+[[package]]
+name = "isoduration"
+version = "20.11.0"
+description = "Operations with ISO 8601 durations"
+optional = true
+python-versions = ">=3.7"
files = [
- {file = "intel_openmp-2021.4.0-py2.py3-none-macosx_10_15_x86_64.macosx_11_0_x86_64.whl", hash = "sha256:41c01e266a7fdb631a7609191709322da2bbf24b252ba763f125dd651bcc7675"},
- {file = "intel_openmp-2021.4.0-py2.py3-none-manylinux1_i686.whl", hash = "sha256:3b921236a38384e2016f0f3d65af6732cf2c12918087128a9163225451e776f2"},
- {file = "intel_openmp-2021.4.0-py2.py3-none-manylinux1_x86_64.whl", hash = "sha256:e2240ab8d01472fed04f3544a878cda5da16c26232b7ea1b59132dbfb48b186e"},
- {file = "intel_openmp-2021.4.0-py2.py3-none-win32.whl", hash = "sha256:6e863d8fd3d7e8ef389d52cf97a50fe2afe1a19247e8c0d168ce021546f96fc9"},
- {file = "intel_openmp-2021.4.0-py2.py3-none-win_amd64.whl", hash = "sha256:eef4c8bcc8acefd7f5cd3b9384dbf73d59e2c99fc56545712ded913f43c4a94f"},
+ {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"},
+ {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"},
]
+[package.dependencies]
+arrow = ">=0.15.0"
+
[[package]]
name = "itsdangerous"
version = "2.2.0"
@@ -1603,6 +2579,25 @@ files = [
{file = "itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173"},
]
+[[package]]
+name = "jedi"
+version = "0.19.1"
+description = "An autocompletion tool for Python that can be used for text editors."
+optional = true
+python-versions = ">=3.6"
+files = [
+ {file = "jedi-0.19.1-py2.py3-none-any.whl", hash = "sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"},
+ {file = "jedi-0.19.1.tar.gz", hash = "sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd"},
+]
+
+[package.dependencies]
+parso = ">=0.8.3,<0.9.0"
+
+[package.extras]
+docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alabaster (==0.7.12)", "babel (==2.9.1)", "chardet (==4.0.0)", "commonmark (==0.8.1)", "docutils (==0.17.1)", "future (==0.18.2)", "idna (==2.10)", "imagesize (==1.2.0)", "mock (==1.0.1)", "packaging (==20.9)", "pyparsing (==2.4.7)", "pytz (==2021.1)", "readthedocs-sphinx-ext (==2.1.4)", "recommonmark (==0.5.0)", "requests (==2.25.1)", "six (==1.15.0)", "snowballstemmer (==2.1.0)", "sphinx (==1.8.5)", "sphinx-rtd-theme (==0.4.3)", "sphinxcontrib-serializinghtml (==1.1.4)", "sphinxcontrib-websupport (==1.2.4)", "urllib3 (==1.26.4)"]
+qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
+testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"]
+
[[package]]
name = "jinja2"
version = "3.1.4"
@@ -1620,6 +2615,467 @@ MarkupSafe = ">=2.0"
[package.extras]
i18n = ["Babel (>=2.7)"]
+[[package]]
+name = "json5"
+version = "0.9.25"
+description = "A Python implementation of the JSON5 data format."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"},
+ {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"},
+]
+
+[[package]]
+name = "jsonlines"
+version = "4.0.0"
+description = "Library with helpers for the jsonlines file format"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "jsonlines-4.0.0-py3-none-any.whl", hash = "sha256:185b334ff2ca5a91362993f42e83588a360cf95ce4b71a73548502bda52a7c55"},
+ {file = "jsonlines-4.0.0.tar.gz", hash = "sha256:0c6d2c09117550c089995247f605ae4cf77dd1533041d366351f6f298822ea74"},
+]
+
+[package.dependencies]
+attrs = ">=19.2.0"
+
+[[package]]
+name = "jsonpointer"
+version = "3.0.0"
+description = "Identify specific nodes in a JSON document (RFC 6901)"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"},
+ {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"},
+]
+
+[[package]]
+name = "jsonschema"
+version = "4.23.0"
+description = "An implementation of JSON Schema validation for Python"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"},
+ {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"},
+]
+
+[package.dependencies]
+attrs = ">=22.2.0"
+fqdn = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
+idna = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
+isoduration = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
+jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format-nongpl\""}
+jsonschema-specifications = ">=2023.03.6"
+referencing = ">=0.28.4"
+rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
+rfc3986-validator = {version = ">0.1.0", optional = true, markers = "extra == \"format-nongpl\""}
+rpds-py = ">=0.7.1"
+uri-template = {version = "*", optional = true, markers = "extra == \"format-nongpl\""}
+webcolors = {version = ">=24.6.0", optional = true, markers = "extra == \"format-nongpl\""}
+
+[package.extras]
+format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"]
+format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"]
+
+[[package]]
+name = "jsonschema-specifications"
+version = "2024.10.1"
+description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
+optional = true
+python-versions = ">=3.9"
+files = [
+ {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"},
+ {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"},
+]
+
+[package.dependencies]
+referencing = ">=0.31.0"
+
+[[package]]
+name = "jupyter"
+version = "1.1.1"
+description = "Jupyter metapackage. Install all the Jupyter components in one go."
+optional = true
+python-versions = "*"
+files = [
+ {file = "jupyter-1.1.1-py2.py3-none-any.whl", hash = "sha256:7a59533c22af65439b24bbe60373a4e95af8f16ac65a6c00820ad378e3f7cc83"},
+ {file = "jupyter-1.1.1.tar.gz", hash = "sha256:d55467bceabdea49d7e3624af7e33d59c37fff53ed3a350e1ac957bed731de7a"},
+]
+
+[package.dependencies]
+ipykernel = "*"
+ipywidgets = "*"
+jupyter-console = "*"
+jupyterlab = "*"
+nbconvert = "*"
+notebook = "*"
+
+[[package]]
+name = "jupyter-client"
+version = "8.6.3"
+description = "Jupyter protocol implementation and client libraries"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"},
+ {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"},
+]
+
+[package.dependencies]
+jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0"
+python-dateutil = ">=2.8.2"
+pyzmq = ">=23.0"
+tornado = ">=6.2"
+traitlets = ">=5.3"
+
+[package.extras]
+docs = ["ipykernel", "myst-parser", "pydata-sphinx-theme", "sphinx (>=4)", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"]
+test = ["coverage", "ipykernel (>=6.14)", "mypy", "paramiko", "pre-commit", "pytest (<8.2.0)", "pytest-cov", "pytest-jupyter[client] (>=0.4.1)", "pytest-timeout"]
+
+[[package]]
+name = "jupyter-console"
+version = "6.6.3"
+description = "Jupyter terminal console"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "jupyter_console-6.6.3-py3-none-any.whl", hash = "sha256:309d33409fcc92ffdad25f0bcdf9a4a9daa61b6f341177570fdac03de5352485"},
+ {file = "jupyter_console-6.6.3.tar.gz", hash = "sha256:566a4bf31c87adbfadf22cdf846e3069b59a71ed5da71d6ba4d8aaad14a53539"},
+]
+
+[package.dependencies]
+ipykernel = ">=6.14"
+ipython = "*"
+jupyter-client = ">=7.0.0"
+jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0"
+prompt-toolkit = ">=3.0.30"
+pygments = "*"
+pyzmq = ">=17"
+traitlets = ">=5.4"
+
+[package.extras]
+test = ["flaky", "pexpect", "pytest"]
+
+[[package]]
+name = "jupyter-core"
+version = "5.7.2"
+description = "Jupyter core package. A base package on which Jupyter projects rely."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"},
+ {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"},
+]
+
+[package.dependencies]
+platformdirs = ">=2.5"
+traitlets = ">=5.3"
+
+[package.extras]
+docs = ["myst-parser", "pydata-sphinx-theme", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"]
+test = ["ipykernel", "pre-commit", "pytest (<8)", "pytest-cov", "pytest-timeout"]
+
+[[package]]
+name = "jupyter-events"
+version = "0.10.0"
+description = "Jupyter Event System library"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "jupyter_events-0.10.0-py3-none-any.whl", hash = "sha256:4b72130875e59d57716d327ea70d3ebc3af1944d3717e5a498b8a06c6c159960"},
+ {file = "jupyter_events-0.10.0.tar.gz", hash = "sha256:670b8229d3cc882ec782144ed22e0d29e1c2d639263f92ca8383e66682845e22"},
+]
+
+[package.dependencies]
+jsonschema = {version = ">=4.18.0", extras = ["format-nongpl"]}
+python-json-logger = ">=2.0.4"
+pyyaml = ">=5.3"
+referencing = "*"
+rfc3339-validator = "*"
+rfc3986-validator = ">=0.1.1"
+traitlets = ">=5.3"
+
+[package.extras]
+cli = ["click", "rich"]
+docs = ["jupyterlite-sphinx", "myst-parser", "pydata-sphinx-theme", "sphinxcontrib-spelling"]
+test = ["click", "pre-commit", "pytest (>=7.0)", "pytest-asyncio (>=0.19.0)", "pytest-console-scripts", "rich"]
+
+[[package]]
+name = "jupyter-lsp"
+version = "2.2.5"
+description = "Multi-Language Server WebSocket proxy for Jupyter Notebook/Lab server"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "jupyter-lsp-2.2.5.tar.gz", hash = "sha256:793147a05ad446f809fd53ef1cd19a9f5256fd0a2d6b7ce943a982cb4f545001"},
+ {file = "jupyter_lsp-2.2.5-py3-none-any.whl", hash = "sha256:45fbddbd505f3fbfb0b6cb2f1bc5e15e83ab7c79cd6e89416b248cb3c00c11da"},
+]
+
+[package.dependencies]
+jupyter-server = ">=1.1.2"
+
+[[package]]
+name = "jupyter-server"
+version = "2.14.2"
+description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "jupyter_server-2.14.2-py3-none-any.whl", hash = "sha256:47ff506127c2f7851a17bf4713434208fc490955d0e8632e95014a9a9afbeefd"},
+ {file = "jupyter_server-2.14.2.tar.gz", hash = "sha256:66095021aa9638ced276c248b1d81862e4c50f292d575920bbe960de1c56b12b"},
+]
+
+[package.dependencies]
+anyio = ">=3.1.0"
+argon2-cffi = ">=21.1"
+jinja2 = ">=3.0.3"
+jupyter-client = ">=7.4.4"
+jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0"
+jupyter-events = ">=0.9.0"
+jupyter-server-terminals = ">=0.4.4"
+nbconvert = ">=6.4.4"
+nbformat = ">=5.3.0"
+overrides = ">=5.0"
+packaging = ">=22.0"
+prometheus-client = ">=0.9"
+pywinpty = {version = ">=2.0.1", markers = "os_name == \"nt\""}
+pyzmq = ">=24"
+send2trash = ">=1.8.2"
+terminado = ">=0.8.3"
+tornado = ">=6.2.0"
+traitlets = ">=5.6.0"
+websocket-client = ">=1.7"
+
+[package.extras]
+docs = ["ipykernel", "jinja2", "jupyter-client", "myst-parser", "nbformat", "prometheus-client", "pydata-sphinx-theme", "send2trash", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-openapi (>=0.8.0)", "sphinxcontrib-spelling", "sphinxemoji", "tornado", "typing-extensions"]
+test = ["flaky", "ipykernel", "pre-commit", "pytest (>=7.0,<9)", "pytest-console-scripts", "pytest-jupyter[server] (>=0.7)", "pytest-timeout", "requests"]
+
+[[package]]
+name = "jupyter-server-terminals"
+version = "0.5.3"
+description = "A Jupyter Server Extension Providing Terminals."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "jupyter_server_terminals-0.5.3-py3-none-any.whl", hash = "sha256:41ee0d7dc0ebf2809c668e0fc726dfaf258fcd3e769568996ca731b6194ae9aa"},
+ {file = "jupyter_server_terminals-0.5.3.tar.gz", hash = "sha256:5ae0295167220e9ace0edcfdb212afd2b01ee8d179fe6f23c899590e9b8a5269"},
+]
+
+[package.dependencies]
+pywinpty = {version = ">=2.0.3", markers = "os_name == \"nt\""}
+terminado = ">=0.8.3"
+
+[package.extras]
+docs = ["jinja2", "jupyter-server", "mistune (<4.0)", "myst-parser", "nbformat", "packaging", "pydata-sphinx-theme", "sphinxcontrib-github-alt", "sphinxcontrib-openapi", "sphinxcontrib-spelling", "sphinxemoji", "tornado"]
+test = ["jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-jupyter[server] (>=0.5.3)", "pytest-timeout"]
+
+[[package]]
+name = "jupyterlab"
+version = "4.2.5"
+description = "JupyterLab computational environment"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "jupyterlab-4.2.5-py3-none-any.whl", hash = "sha256:73b6e0775d41a9fee7ee756c80f58a6bed4040869ccc21411dc559818874d321"},
+ {file = "jupyterlab-4.2.5.tar.gz", hash = "sha256:ae7f3a1b8cb88b4f55009ce79fa7c06f99d70cd63601ee4aa91815d054f46f75"},
+]
+
+[package.dependencies]
+async-lru = ">=1.0.0"
+httpx = ">=0.25.0"
+ipykernel = ">=6.5.0"
+jinja2 = ">=3.0.3"
+jupyter-core = "*"
+jupyter-lsp = ">=2.0.0"
+jupyter-server = ">=2.4.0,<3"
+jupyterlab-server = ">=2.27.1,<3"
+notebook-shim = ">=0.2"
+packaging = "*"
+setuptools = ">=40.1.0"
+tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""}
+tornado = ">=6.2.0"
+traitlets = "*"
+
+[package.extras]
+dev = ["build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.3.5)"]
+docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-jupyter", "sphinx (>=1.8,<7.3.0)", "sphinx-copybutton"]
+docs-screenshots = ["altair (==5.3.0)", "ipython (==8.16.1)", "ipywidgets (==8.1.2)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.1.post2)", "matplotlib (==3.8.3)", "nbconvert (>=7.0.0)", "pandas (==2.2.1)", "scipy (==1.12.0)", "vega-datasets (==0.9.0)"]
+test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"]
+upgrade-extension = ["copier (>=9,<10)", "jinja2-time (<0.3)", "pydantic (<3.0)", "pyyaml-include (<3.0)", "tomli-w (<2.0)"]
+
+[[package]]
+name = "jupyterlab-pygments"
+version = "0.3.0"
+description = "Pygments theme using JupyterLab CSS variables"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "jupyterlab_pygments-0.3.0-py3-none-any.whl", hash = "sha256:841a89020971da1d8693f1a99997aefc5dc424bb1b251fd6322462a1b8842780"},
+ {file = "jupyterlab_pygments-0.3.0.tar.gz", hash = "sha256:721aca4d9029252b11cfa9d185e5b5af4d54772bb8072f9b7036f4170054d35d"},
+]
+
+[[package]]
+name = "jupyterlab-server"
+version = "2.27.3"
+description = "A set of server components for JupyterLab and JupyterLab like applications."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "jupyterlab_server-2.27.3-py3-none-any.whl", hash = "sha256:e697488f66c3db49df675158a77b3b017520d772c6e1548c7d9bcc5df7944ee4"},
+ {file = "jupyterlab_server-2.27.3.tar.gz", hash = "sha256:eb36caca59e74471988f0ae25c77945610b887f777255aa21f8065def9e51ed4"},
+]
+
+[package.dependencies]
+babel = ">=2.10"
+jinja2 = ">=3.0.3"
+json5 = ">=0.9.0"
+jsonschema = ">=4.18.0"
+jupyter-server = ">=1.21,<3"
+packaging = ">=21.3"
+requests = ">=2.31"
+
+[package.extras]
+docs = ["autodoc-traits", "jinja2 (<3.2.0)", "mistune (<4)", "myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-copybutton", "sphinxcontrib-openapi (>0.8)"]
+openapi = ["openapi-core (>=0.18.0,<0.19.0)", "ruamel-yaml"]
+test = ["hatch", "ipykernel", "openapi-core (>=0.18.0,<0.19.0)", "openapi-spec-validator (>=0.6.0,<0.8.0)", "pytest (>=7.0,<8)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter[server] (>=0.6.2)", "pytest-timeout", "requests-mock", "ruamel-yaml", "sphinxcontrib-spelling", "strict-rfc3339", "werkzeug"]
+
+[[package]]
+name = "jupyterlab-widgets"
+version = "3.0.13"
+description = "Jupyter interactive widgets for JupyterLab"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "jupyterlab_widgets-3.0.13-py3-none-any.whl", hash = "sha256:e3cda2c233ce144192f1e29914ad522b2f4c40e77214b0cc97377ca3d323db54"},
+ {file = "jupyterlab_widgets-3.0.13.tar.gz", hash = "sha256:a2966d385328c1942b683a8cd96b89b8dd82c8b8f81dda902bb2bc06d46f5bed"},
+]
+
+[[package]]
+name = "kiwisolver"
+version = "1.4.7"
+description = "A fast implementation of the Cassowary constraint solver"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8a9c83f75223d5e48b0bc9cb1bf2776cf01563e00ade8775ffe13b0b6e1af3a6"},
+ {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58370b1ffbd35407444d57057b57da5d6549d2d854fa30249771775c63b5fe17"},
+ {file = "kiwisolver-1.4.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa0abdf853e09aff551db11fce173e2177d00786c688203f52c87ad7fcd91ef9"},
+ {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8d53103597a252fb3ab8b5845af04c7a26d5e7ea8122303dd7a021176a87e8b9"},
+ {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:88f17c5ffa8e9462fb79f62746428dd57b46eb931698e42e990ad63103f35e6c"},
+ {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a9ca9c710d598fd75ee5de59d5bda2684d9db36a9f50b6125eaea3969c2599"},
+ {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f4d742cb7af1c28303a51b7a27aaee540e71bb8e24f68c736f6f2ffc82f2bf05"},
+ {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28c7fea2196bf4c2f8d46a0415c77a1c480cc0724722f23d7410ffe9842c407"},
+ {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e968b84db54f9d42046cf154e02911e39c0435c9801681e3fc9ce8a3c4130278"},
+ {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0c18ec74c0472de033e1bebb2911c3c310eef5649133dd0bedf2a169a1b269e5"},
+ {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8f0ea6da6d393d8b2e187e6a5e3fb81f5862010a40c3945e2c6d12ae45cfb2ad"},
+ {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:f106407dda69ae456dd1227966bf445b157ccc80ba0dff3802bb63f30b74e895"},
+ {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:84ec80df401cfee1457063732d90022f93951944b5b58975d34ab56bb150dfb3"},
+ {file = "kiwisolver-1.4.7-cp310-cp310-win32.whl", hash = "sha256:71bb308552200fb2c195e35ef05de12f0c878c07fc91c270eb3d6e41698c3bcc"},
+ {file = "kiwisolver-1.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:44756f9fd339de0fb6ee4f8c1696cfd19b2422e0d70b4cefc1cc7f1f64045a8c"},
+ {file = "kiwisolver-1.4.7-cp310-cp310-win_arm64.whl", hash = "sha256:78a42513018c41c2ffd262eb676442315cbfe3c44eed82385c2ed043bc63210a"},
+ {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d2b0e12a42fb4e72d509fc994713d099cbb15ebf1103545e8a45f14da2dfca54"},
+ {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2a8781ac3edc42ea4b90bc23e7d37b665d89423818e26eb6df90698aa2287c95"},
+ {file = "kiwisolver-1.4.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:46707a10836894b559e04b0fd143e343945c97fd170d69a2d26d640b4e297935"},
+ {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef97b8df011141c9b0f6caf23b29379f87dd13183c978a30a3c546d2c47314cb"},
+ {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab58c12a2cd0fc769089e6d38466c46d7f76aced0a1f54c77652446733d2d02"},
+ {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:803b8e1459341c1bb56d1c5c010406d5edec8a0713a0945851290a7930679b51"},
+ {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9a9e8a507420fe35992ee9ecb302dab68550dedc0da9e2880dd88071c5fb052"},
+ {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18077b53dc3bb490e330669a99920c5e6a496889ae8c63b58fbc57c3d7f33a18"},
+ {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6af936f79086a89b3680a280c47ea90b4df7047b5bdf3aa5c524bbedddb9e545"},
+ {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3abc5b19d24af4b77d1598a585b8a719beb8569a71568b66f4ebe1fb0449460b"},
+ {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:933d4de052939d90afbe6e9d5273ae05fb836cc86c15b686edd4b3560cc0ee36"},
+ {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:65e720d2ab2b53f1f72fb5da5fb477455905ce2c88aaa671ff0a447c2c80e8e3"},
+ {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3bf1ed55088f214ba6427484c59553123fdd9b218a42bbc8c6496d6754b1e523"},
+ {file = "kiwisolver-1.4.7-cp311-cp311-win32.whl", hash = "sha256:4c00336b9dd5ad96d0a558fd18a8b6f711b7449acce4c157e7343ba92dd0cf3d"},
+ {file = "kiwisolver-1.4.7-cp311-cp311-win_amd64.whl", hash = "sha256:929e294c1ac1e9f615c62a4e4313ca1823ba37326c164ec720a803287c4c499b"},
+ {file = "kiwisolver-1.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:e33e8fbd440c917106b237ef1a2f1449dfbb9b6f6e1ce17c94cd6a1e0d438376"},
+ {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:5360cc32706dab3931f738d3079652d20982511f7c0ac5711483e6eab08efff2"},
+ {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942216596dc64ddb25adb215c3c783215b23626f8d84e8eff8d6d45c3f29f75a"},
+ {file = "kiwisolver-1.4.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:48b571ecd8bae15702e4f22d3ff6a0f13e54d3d00cd25216d5e7f658242065ee"},
+ {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad42ba922c67c5f219097b28fae965e10045ddf145d2928bfac2eb2e17673640"},
+ {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:612a10bdae23404a72941a0fc8fa2660c6ea1217c4ce0dbcab8a8f6543ea9e7f"},
+ {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e838bba3a3bac0fe06d849d29772eb1afb9745a59710762e4ba3f4cb8424483"},
+ {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:22f499f6157236c19f4bbbd472fa55b063db77a16cd74d49afe28992dff8c258"},
+ {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693902d433cf585133699972b6d7c42a8b9f8f826ebcaf0132ff55200afc599e"},
+ {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4e77f2126c3e0b0d055f44513ed349038ac180371ed9b52fe96a32aa071a5107"},
+ {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:657a05857bda581c3656bfc3b20e353c232e9193eb167766ad2dc58b56504948"},
+ {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4bfa75a048c056a411f9705856abfc872558e33c055d80af6a380e3658766038"},
+ {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:34ea1de54beef1c104422d210c47c7d2a4999bdecf42c7b5718fbe59a4cac383"},
+ {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:90da3b5f694b85231cf93586dad5e90e2d71b9428f9aad96952c99055582f520"},
+ {file = "kiwisolver-1.4.7-cp312-cp312-win32.whl", hash = "sha256:18e0cca3e008e17fe9b164b55735a325140a5a35faad8de92dd80265cd5eb80b"},
+ {file = "kiwisolver-1.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:58cb20602b18f86f83a5c87d3ee1c766a79c0d452f8def86d925e6c60fbf7bfb"},
+ {file = "kiwisolver-1.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:f5a8b53bdc0b3961f8b6125e198617c40aeed638b387913bf1ce78afb1b0be2a"},
+ {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2e6039dcbe79a8e0f044f1c39db1986a1b8071051efba3ee4d74f5b365f5226e"},
+ {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a1ecf0ac1c518487d9d23b1cd7139a6a65bc460cd101ab01f1be82ecf09794b6"},
+ {file = "kiwisolver-1.4.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7ab9ccab2b5bd5702ab0803676a580fffa2aa178c2badc5557a84cc943fcf750"},
+ {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f816dd2277f8d63d79f9c8473a79fe54047bc0467754962840782c575522224d"},
+ {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf8bcc23ceb5a1b624572a1623b9f79d2c3b337c8c455405ef231933a10da379"},
+ {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dea0bf229319828467d7fca8c7c189780aa9ff679c94539eed7532ebe33ed37c"},
+ {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c06a4c7cf15ec739ce0e5971b26c93638730090add60e183530d70848ebdd34"},
+ {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:913983ad2deb14e66d83c28b632fd35ba2b825031f2fa4ca29675e665dfecbe1"},
+ {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5337ec7809bcd0f424c6b705ecf97941c46279cf5ed92311782c7c9c2026f07f"},
+ {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c26ed10c4f6fa6ddb329a5120ba3b6db349ca192ae211e882970bfc9d91420b"},
+ {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c619b101e6de2222c1fcb0531e1b17bbffbe54294bfba43ea0d411d428618c27"},
+ {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:073a36c8273647592ea332e816e75ef8da5c303236ec0167196793eb1e34657a"},
+ {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3ce6b2b0231bda412463e152fc18335ba32faf4e8c23a754ad50ffa70e4091ee"},
+ {file = "kiwisolver-1.4.7-cp313-cp313-win32.whl", hash = "sha256:f4c9aee212bc89d4e13f58be11a56cc8036cabad119259d12ace14b34476fd07"},
+ {file = "kiwisolver-1.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:8a3ec5aa8e38fc4c8af308917ce12c536f1c88452ce554027e55b22cbbfbff76"},
+ {file = "kiwisolver-1.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:76c8094ac20ec259471ac53e774623eb62e6e1f56cd8690c67ce6ce4fcb05650"},
+ {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5d5abf8f8ec1f4e22882273c423e16cae834c36856cac348cfbfa68e01c40f3a"},
+ {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aeb3531b196ef6f11776c21674dba836aeea9d5bd1cf630f869e3d90b16cfade"},
+ {file = "kiwisolver-1.4.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7d755065e4e866a8086c9bdada157133ff466476a2ad7861828e17b6026e22c"},
+ {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08471d4d86cbaec61f86b217dd938a83d85e03785f51121e791a6e6689a3be95"},
+ {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7bbfcb7165ce3d54a3dfbe731e470f65739c4c1f85bb1018ee912bae139e263b"},
+ {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d34eb8494bea691a1a450141ebb5385e4b69d38bb8403b5146ad279f4b30fa3"},
+ {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9242795d174daa40105c1d86aba618e8eab7bf96ba8c3ee614da8302a9f95503"},
+ {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a0f64a48bb81af7450e641e3fe0b0394d7381e342805479178b3d335d60ca7cf"},
+ {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8e045731a5416357638d1700927529e2b8ab304811671f665b225f8bf8d8f933"},
+ {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4322872d5772cae7369f8351da1edf255a604ea7087fe295411397d0cfd9655e"},
+ {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e1631290ee9271dffe3062d2634c3ecac02c83890ada077d225e081aca8aab89"},
+ {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:edcfc407e4eb17e037bca59be0e85a2031a2ac87e4fed26d3e9df88b4165f92d"},
+ {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4d05d81ecb47d11e7f8932bd8b61b720bf0b41199358f3f5e36d38e28f0532c5"},
+ {file = "kiwisolver-1.4.7-cp38-cp38-win32.whl", hash = "sha256:b38ac83d5f04b15e515fd86f312479d950d05ce2368d5413d46c088dda7de90a"},
+ {file = "kiwisolver-1.4.7-cp38-cp38-win_amd64.whl", hash = "sha256:d83db7cde68459fc803052a55ace60bea2bae361fc3b7a6d5da07e11954e4b09"},
+ {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f9362ecfca44c863569d3d3c033dbe8ba452ff8eed6f6b5806382741a1334bd"},
+ {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8df2eb9b2bac43ef8b082e06f750350fbbaf2887534a5be97f6cf07b19d9583"},
+ {file = "kiwisolver-1.4.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f32d6edbc638cde7652bd690c3e728b25332acbadd7cad670cc4a02558d9c417"},
+ {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e2e6c39bd7b9372b0be21456caab138e8e69cc0fc1190a9dfa92bd45a1e6e904"},
+ {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dda56c24d869b1193fcc763f1284b9126550eaf84b88bbc7256e15028f19188a"},
+ {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79849239c39b5e1fd906556c474d9b0439ea6792b637511f3fe3a41158d89ca8"},
+ {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e3bc157fed2a4c02ec468de4ecd12a6e22818d4f09cde2c31ee3226ffbefab2"},
+ {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3da53da805b71e41053dc670f9a820d1157aae77b6b944e08024d17bcd51ef88"},
+ {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8705f17dfeb43139a692298cb6637ee2e59c0194538153e83e9ee0c75c2eddde"},
+ {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:82a5c2f4b87c26bb1a0ef3d16b5c4753434633b83d365cc0ddf2770c93829e3c"},
+ {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce8be0466f4c0d585cdb6c1e2ed07232221df101a4c6f28821d2aa754ca2d9e2"},
+ {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:409afdfe1e2e90e6ee7fc896f3df9a7fec8e793e58bfa0d052c8a82f99c37abb"},
+ {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5b9c3f4ee0b9a439d2415012bd1b1cc2df59e4d6a9939f4d669241d30b414327"},
+ {file = "kiwisolver-1.4.7-cp39-cp39-win32.whl", hash = "sha256:a79ae34384df2b615eefca647a2873842ac3b596418032bef9a7283675962644"},
+ {file = "kiwisolver-1.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:cf0438b42121a66a3a667de17e779330fc0f20b0d97d59d2f2121e182b0505e4"},
+ {file = "kiwisolver-1.4.7-cp39-cp39-win_arm64.whl", hash = "sha256:764202cc7e70f767dab49e8df52c7455e8de0df5d858fa801a11aa0d882ccf3f"},
+ {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:94252291e3fe68001b1dd747b4c0b3be12582839b95ad4d1b641924d68fd4643"},
+ {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b7dfa3b546da08a9f622bb6becdb14b3e24aaa30adba66749d38f3cc7ea9706"},
+ {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3de6481f4ed8b734da5df134cd5a6a64fe32124fe83dde1e5b5f29fe30b1e6"},
+ {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a91b5f9f1205845d488c928e8570dcb62b893372f63b8b6e98b863ebd2368ff2"},
+ {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fa14dbd66b8b8f470d5fc79c089a66185619d31645f9b0773b88b19f7223c4"},
+ {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:eb542fe7933aa09d8d8f9d9097ef37532a7df6497819d16efe4359890a2f417a"},
+ {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bfa1acfa0c54932d5607e19a2c24646fb4c1ae2694437789129cf099789a3b00"},
+ {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:eee3ea935c3d227d49b4eb85660ff631556841f6e567f0f7bda972df6c2c9935"},
+ {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f3160309af4396e0ed04db259c3ccbfdc3621b5559b5453075e5de555e1f3a1b"},
+ {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a17f6a29cf8935e587cc8a4dbfc8368c55edc645283db0ce9801016f83526c2d"},
+ {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10849fb2c1ecbfae45a693c070e0320a91b35dd4bcf58172c023b994283a124d"},
+ {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ac542bf38a8a4be2dc6b15248d36315ccc65f0743f7b1a76688ffb6b5129a5c2"},
+ {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8b01aac285f91ca889c800042c35ad3b239e704b150cfd3382adfc9dcc780e39"},
+ {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:48be928f59a1f5c8207154f935334d374e79f2b5d212826307d072595ad76a2e"},
+ {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f37cfe618a117e50d8c240555331160d73d0411422b59b5ee217843d7b693608"},
+ {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599b5c873c63a1f6ed7eead644a8a380cfbdf5db91dcb6f85707aaab213b1674"},
+ {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:801fa7802e5cfabe3ab0c81a34c323a319b097dfb5004be950482d882f3d7225"},
+ {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0c6c43471bc764fad4bc99c5c2d6d16a676b1abf844ca7c8702bdae92df01ee0"},
+ {file = "kiwisolver-1.4.7.tar.gz", hash = "sha256:9893ff81bd7107f7b685d3017cc6583daadb4fc26e4a888350df530e41980a60"},
+]
+
[[package]]
name = "labmaze"
version = "1.0.6"
@@ -1715,153 +3171,149 @@ files = [
[[package]]
name = "lxml"
-version = "5.2.2"
+version = "5.3.0"
description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API."
optional = true
python-versions = ">=3.6"
files = [
- {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:364d03207f3e603922d0d3932ef363d55bbf48e3647395765f9bfcbdf6d23632"},
- {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50127c186f191b8917ea2fb8b206fbebe87fd414a6084d15568c27d0a21d60db"},
- {file = "lxml-5.2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4f025ef3db1c6da4460dd27c118d8cd136d0391da4e387a15e48e5c975147"},
- {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981a06a3076997adf7c743dcd0d7a0415582661e2517c7d961493572e909aa1d"},
- {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aef5474d913d3b05e613906ba4090433c515e13ea49c837aca18bde190853dff"},
- {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e275ea572389e41e8b039ac076a46cb87ee6b8542df3fff26f5baab43713bca"},
- {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5b65529bb2f21ac7861a0e94fdbf5dc0daab41497d18223b46ee8515e5ad297"},
- {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bcc98f911f10278d1daf14b87d65325851a1d29153caaf146877ec37031d5f36"},
- {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:b47633251727c8fe279f34025844b3b3a3e40cd1b198356d003aa146258d13a2"},
- {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:fbc9d316552f9ef7bba39f4edfad4a734d3d6f93341232a9dddadec4f15d425f"},
- {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:13e69be35391ce72712184f69000cda04fc89689429179bc4c0ae5f0b7a8c21b"},
- {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b6a30a9ab040b3f545b697cb3adbf3696c05a3a68aad172e3fd7ca73ab3c835"},
- {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a233bb68625a85126ac9f1fc66d24337d6e8a0f9207b688eec2e7c880f012ec0"},
- {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:dfa7c241073d8f2b8e8dbc7803c434f57dbb83ae2a3d7892dd068d99e96efe2c"},
- {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a7aca7964ac4bb07680d5c9d63b9d7028cace3e2d43175cb50bba8c5ad33316"},
- {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae4073a60ab98529ab8a72ebf429f2a8cc612619a8c04e08bed27450d52103c0"},
- {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ffb2be176fed4457e445fe540617f0252a72a8bc56208fd65a690fdb1f57660b"},
- {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e290d79a4107d7d794634ce3e985b9ae4f920380a813717adf61804904dc4393"},
- {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96e85aa09274955bb6bd483eaf5b12abadade01010478154b0ec70284c1b1526"},
- {file = "lxml-5.2.2-cp310-cp310-win32.whl", hash = "sha256:f956196ef61369f1685d14dad80611488d8dc1ef00be57c0c5a03064005b0f30"},
- {file = "lxml-5.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:875a3f90d7eb5c5d77e529080d95140eacb3c6d13ad5b616ee8095447b1d22e7"},
- {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45f9494613160d0405682f9eee781c7e6d1bf45f819654eb249f8f46a2c22545"},
- {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0b3f2df149efb242cee2ffdeb6674b7f30d23c9a7af26595099afaf46ef4e88"},
- {file = "lxml-5.2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d28cb356f119a437cc58a13f8135ab8a4c8ece18159eb9194b0d269ec4e28083"},
- {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657a972f46bbefdbba2d4f14413c0d079f9ae243bd68193cb5061b9732fa54c1"},
- {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b9ea10063efb77a965a8d5f4182806fbf59ed068b3c3fd6f30d2ac7bee734"},
- {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07542787f86112d46d07d4f3c4e7c760282011b354d012dc4141cc12a68cef5f"},
- {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:303f540ad2dddd35b92415b74b900c749ec2010e703ab3bfd6660979d01fd4ed"},
- {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2eb2227ce1ff998faf0cd7fe85bbf086aa41dfc5af3b1d80867ecfe75fb68df3"},
- {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:1d8a701774dfc42a2f0b8ccdfe7dbc140500d1049e0632a611985d943fcf12df"},
- {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:56793b7a1a091a7c286b5f4aa1fe4ae5d1446fe742d00cdf2ffb1077865db10d"},
- {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eb00b549b13bd6d884c863554566095bf6fa9c3cecb2e7b399c4bc7904cb33b5"},
- {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a2569a1f15ae6c8c64108a2cd2b4a858fc1e13d25846be0666fc144715e32ab"},
- {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:8cf85a6e40ff1f37fe0f25719aadf443686b1ac7652593dc53c7ef9b8492b115"},
- {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d237ba6664b8e60fd90b8549a149a74fcc675272e0e95539a00522e4ca688b04"},
- {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0b3f5016e00ae7630a4b83d0868fca1e3d494c78a75b1c7252606a3a1c5fc2ad"},
- {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23441e2b5339bc54dc949e9e675fa35efe858108404ef9aa92f0456929ef6fe8"},
- {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2fb0ba3e8566548d6c8e7dd82a8229ff47bd8fb8c2da237607ac8e5a1b8312e5"},
- {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:79d1fb9252e7e2cfe4de6e9a6610c7cbb99b9708e2c3e29057f487de5a9eaefa"},
- {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6dcc3d17eac1df7859ae01202e9bb11ffa8c98949dcbeb1069c8b9a75917e01b"},
- {file = "lxml-5.2.2-cp311-cp311-win32.whl", hash = "sha256:4c30a2f83677876465f44c018830f608fa3c6a8a466eb223535035fbc16f3438"},
- {file = "lxml-5.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:49095a38eb333aaf44c06052fd2ec3b8f23e19747ca7ec6f6c954ffea6dbf7be"},
- {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7429e7faa1a60cad26ae4227f4dd0459efde239e494c7312624ce228e04f6391"},
- {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:50ccb5d355961c0f12f6cf24b7187dbabd5433f29e15147a67995474f27d1776"},
- {file = "lxml-5.2.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc911208b18842a3a57266d8e51fc3cfaccee90a5351b92079beed912a7914c2"},
- {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ce9e786753743159799fdf8e92a5da351158c4bfb6f2db0bf31e7892a1feb5"},
- {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec87c44f619380878bd49ca109669c9f221d9ae6883a5bcb3616785fa8f94c97"},
- {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08ea0f606808354eb8f2dfaac095963cb25d9d28e27edcc375d7b30ab01abbf6"},
- {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75a9632f1d4f698b2e6e2e1ada40e71f369b15d69baddb8968dcc8e683839b18"},
- {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74da9f97daec6928567b48c90ea2c82a106b2d500f397eeb8941e47d30b1ca85"},
- {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:0969e92af09c5687d769731e3f39ed62427cc72176cebb54b7a9d52cc4fa3b73"},
- {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:9164361769b6ca7769079f4d426a41df6164879f7f3568be9086e15baca61466"},
- {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d26a618ae1766279f2660aca0081b2220aca6bd1aa06b2cf73f07383faf48927"},
- {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab67ed772c584b7ef2379797bf14b82df9aa5f7438c5b9a09624dd834c1c1aaf"},
- {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3d1e35572a56941b32c239774d7e9ad724074d37f90c7a7d499ab98761bd80cf"},
- {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8268cbcd48c5375f46e000adb1390572c98879eb4f77910c6053d25cc3ac2c67"},
- {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e282aedd63c639c07c3857097fc0e236f984ceb4089a8b284da1c526491e3f3d"},
- {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfdc2bfe69e9adf0df4915949c22a25b39d175d599bf98e7ddf620a13678585"},
- {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4aefd911793b5d2d7a921233a54c90329bf3d4a6817dc465f12ffdfe4fc7b8fe"},
- {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8b8df03a9e995b6211dafa63b32f9d405881518ff1ddd775db4e7b98fb545e1c"},
- {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f11ae142f3a322d44513de1018b50f474f8f736bc3cd91d969f464b5bfef8836"},
- {file = "lxml-5.2.2-cp312-cp312-win32.whl", hash = "sha256:16a8326e51fcdffc886294c1e70b11ddccec836516a343f9ed0f82aac043c24a"},
- {file = "lxml-5.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:bbc4b80af581e18568ff07f6395c02114d05f4865c2812a1f02f2eaecf0bfd48"},
- {file = "lxml-5.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3d9d13603410b72787579769469af730c38f2f25505573a5888a94b62b920f8"},
- {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38b67afb0a06b8575948641c1d6d68e41b83a3abeae2ca9eed2ac59892b36706"},
- {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c689d0d5381f56de7bd6966a4541bff6e08bf8d3871bbd89a0c6ab18aa699573"},
- {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:cf2a978c795b54c539f47964ec05e35c05bd045db5ca1e8366988c7f2fe6b3ce"},
- {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:739e36ef7412b2bd940f75b278749106e6d025e40027c0b94a17ef7968d55d56"},
- {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d8bbcd21769594dbba9c37d3c819e2d5847656ca99c747ddb31ac1701d0c0ed9"},
- {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:2304d3c93f2258ccf2cf7a6ba8c761d76ef84948d87bf9664e14d203da2cd264"},
- {file = "lxml-5.2.2-cp36-cp36m-win32.whl", hash = "sha256:02437fb7308386867c8b7b0e5bc4cd4b04548b1c5d089ffb8e7b31009b961dc3"},
- {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"},
- {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"},
- {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"},
- {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"},
- {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"},
- {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"},
- {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"},
- {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"},
- {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"},
- {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"},
- {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"},
- {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"},
- {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"},
- {file = "lxml-5.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ed07b3062b055d7a7f9d6557a251cc655eed0b3152b76de619516621c56f5d3"},
- {file = "lxml-5.2.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60fdd125d85bf9c279ffb8e94c78c51b3b6a37711464e1f5f31078b45002421"},
- {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7e24cb69ee5f32e003f50e016d5fde438010c1022c96738b04fc2423e61706"},
- {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cfafd56887eaed93d07bc4547abd5e09d837a002b791e9767765492a75883f"},
- {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19b4e485cd07b7d83e3fe3b72132e7df70bfac22b14fe4bf7a23822c3a35bff5"},
- {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7ce7ad8abebe737ad6143d9d3bf94b88b93365ea30a5b81f6877ec9c0dee0a48"},
- {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e49b052b768bb74f58c7dda4e0bdf7b79d43a9204ca584ffe1fb48a6f3c84c66"},
- {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d14a0d029a4e176795cef99c056d58067c06195e0c7e2dbb293bf95c08f772a3"},
- {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:be49ad33819d7dcc28a309b86d4ed98e1a65f3075c6acd3cd4fe32103235222b"},
- {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a6d17e0370d2516d5bb9062c7b4cb731cff921fc875644c3d751ad857ba9c5b1"},
- {file = "lxml-5.2.2-cp38-cp38-win32.whl", hash = "sha256:5b8c041b6265e08eac8a724b74b655404070b636a8dd6d7a13c3adc07882ef30"},
- {file = "lxml-5.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:f61efaf4bed1cc0860e567d2ecb2363974d414f7f1f124b1df368bbf183453a6"},
- {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb91819461b1b56d06fa4bcf86617fac795f6a99d12239fb0c68dbeba41a0a30"},
- {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4ed0c7cbecde7194cd3228c044e86bf73e30a23505af852857c09c24e77ec5d"},
- {file = "lxml-5.2.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54401c77a63cc7d6dc4b4e173bb484f28a5607f3df71484709fe037c92d4f0ed"},
- {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:625e3ef310e7fa3a761d48ca7ea1f9d8718a32b1542e727d584d82f4453d5eeb"},
- {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:519895c99c815a1a24a926d5b60627ce5ea48e9f639a5cd328bda0515ea0f10c"},
- {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c7079d5eb1c1315a858bbf180000757db8ad904a89476653232db835c3114001"},
- {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:343ab62e9ca78094f2306aefed67dcfad61c4683f87eee48ff2fd74902447726"},
- {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:cd9e78285da6c9ba2d5c769628f43ef66d96ac3085e59b10ad4f3707980710d3"},
- {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:546cf886f6242dff9ec206331209db9c8e1643ae642dea5fdbecae2453cb50fd"},
- {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:02f6a8eb6512fdc2fd4ca10a49c341c4e109aa6e9448cc4859af5b949622715a"},
- {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:339ee4a4704bc724757cd5dd9dc8cf4d00980f5d3e6e06d5847c1b594ace68ab"},
- {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0a028b61a2e357ace98b1615fc03f76eb517cc028993964fe08ad514b1e8892d"},
- {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f90e552ecbad426eab352e7b2933091f2be77115bb16f09f78404861c8322981"},
- {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d83e2d94b69bf31ead2fa45f0acdef0757fa0458a129734f59f67f3d2eb7ef32"},
- {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a02d3c48f9bb1e10c7788d92c0c7db6f2002d024ab6e74d6f45ae33e3d0288a3"},
- {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d68ce8e7b2075390e8ac1e1d3a99e8b6372c694bbe612632606d1d546794207"},
- {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:453d037e09a5176d92ec0fd282e934ed26d806331a8b70ab431a81e2fbabf56d"},
- {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3b019d4ee84b683342af793b56bb35034bd749e4cbdd3d33f7d1107790f8c472"},
- {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb3942960f0beb9f46e2a71a3aca220d1ca32feb5a398656be934320804c0df9"},
- {file = "lxml-5.2.2-cp39-cp39-win32.whl", hash = "sha256:ac6540c9fff6e3813d29d0403ee7a81897f1d8ecc09a8ff84d2eea70ede1cdbf"},
- {file = "lxml-5.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:610b5c77428a50269f38a534057444c249976433f40f53e3b47e68349cca1425"},
- {file = "lxml-5.2.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b537bd04d7ccd7c6350cdaaaad911f6312cbd61e6e6045542f781c7f8b2e99d2"},
- {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4820c02195d6dfb7b8508ff276752f6b2ff8b64ae5d13ebe02e7667e035000b9"},
- {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a09f6184f17a80897172863a655467da2b11151ec98ba8d7af89f17bf63dae"},
- {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76acba4c66c47d27c8365e7c10b3d8016a7da83d3191d053a58382311a8bf4e1"},
- {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b128092c927eaf485928cec0c28f6b8bead277e28acf56800e972aa2c2abd7a2"},
- {file = "lxml-5.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ae791f6bd43305aade8c0e22f816b34f3b72b6c820477aab4d18473a37e8090b"},
- {file = "lxml-5.2.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a2f6a1bc2460e643785a2cde17293bd7a8f990884b822f7bca47bee0a82fc66b"},
- {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e8d351ff44c1638cb6e980623d517abd9f580d2e53bfcd18d8941c052a5a009"},
- {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec4bd9133420c5c52d562469c754f27c5c9e36ee06abc169612c959bd7dbb07"},
- {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:55ce6b6d803890bd3cc89975fca9de1dff39729b43b73cb15ddd933b8bc20484"},
- {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ab6a358d1286498d80fe67bd3d69fcbc7d1359b45b41e74c4a26964ca99c3f8"},
- {file = "lxml-5.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:06668e39e1f3c065349c51ac27ae430719d7806c026fec462e5693b08b95696b"},
- {file = "lxml-5.2.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9cd5323344d8ebb9fb5e96da5de5ad4ebab993bbf51674259dbe9d7a18049525"},
- {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89feb82ca055af0fe797a2323ec9043b26bc371365847dbe83c7fd2e2f181c34"},
- {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e481bba1e11ba585fb06db666bfc23dbe181dbafc7b25776156120bf12e0d5a6"},
- {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d6c6ea6a11ca0ff9cd0390b885984ed31157c168565702959c25e2191674a14"},
- {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3d98de734abee23e61f6b8c2e08a88453ada7d6486dc7cdc82922a03968928db"},
- {file = "lxml-5.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:69ab77a1373f1e7563e0fb5a29a8440367dec051da6c7405333699d07444f511"},
- {file = "lxml-5.2.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:34e17913c431f5ae01d8658dbf792fdc457073dcdfbb31dc0cc6ab256e664a8d"},
- {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f8757b03208c3f50097761be2dea0aba02e94f0dc7023ed73a7bb14ff11eb0"},
- {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a520b4f9974b0a0a6ed73c2154de57cdfd0c8800f4f15ab2b73238ffed0b36e"},
- {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5e097646944b66207023bc3c634827de858aebc226d5d4d6d16f0b77566ea182"},
- {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b5e4ef22ff25bfd4ede5f8fb30f7b24446345f3e79d9b7455aef2836437bc38a"},
- {file = "lxml-5.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff69a9a0b4b17d78170c73abe2ab12084bdf1691550c5629ad1fe7849433f324"},
- {file = "lxml-5.2.2.tar.gz", hash = "sha256:bb2dc4898180bea79863d5487e5f9c7c34297414bad54bcd0f0852aee9cfdb87"},
+ {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"},
+ {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"},
+ {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"},
+ {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"},
+ {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"},
+ {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"},
+ {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"},
+ {file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"},
+ {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"},
+ {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"},
+ {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"},
+ {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"},
+ {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"},
+ {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"},
+ {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"},
+ {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"},
+ {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"},
+ {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"},
+ {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"},
+ {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"},
+ {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"},
+ {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"},
+ {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"},
+ {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"},
+ {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"},
+ {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"},
+ {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"},
+ {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"},
+ {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"},
+ {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"},
+ {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"},
+ {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"},
+ {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"},
+ {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"},
+ {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"},
+ {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"},
+ {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"},
+ {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"},
+ {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"},
+ {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"},
+ {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"},
+ {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"},
+ {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"},
+ {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"},
+ {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"},
+ {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"},
+ {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"},
+ {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"},
+ {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"},
+ {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"},
+ {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"},
+ {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"},
+ {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"},
+ {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"},
+ {file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"},
+ {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"},
+ {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"},
+ {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"},
+ {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"},
+ {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"},
+ {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"},
+ {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"},
+ {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"},
+ {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"},
+ {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"},
+ {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"},
+ {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"},
+ {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"},
+ {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"},
+ {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"},
+ {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"},
+ {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"},
+ {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"},
+ {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"},
+ {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"},
+ {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"},
+ {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"},
+ {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"},
+ {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"},
+ {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"},
+ {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"},
+ {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"},
+ {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"},
+ {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"},
+ {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"},
+ {file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"},
+ {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"},
+ {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"},
+ {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"},
+ {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"},
+ {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"},
+ {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"},
+ {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"},
+ {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"},
+ {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"},
+ {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"},
+ {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"},
+ {file = "lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"},
]
[package.extras]
@@ -1869,94 +3321,218 @@ cssselect = ["cssselect (>=0.7)"]
html-clean = ["lxml-html-clean"]
html5 = ["html5lib"]
htmlsoup = ["BeautifulSoup4"]
-source = ["Cython (>=3.0.10)"]
+source = ["Cython (>=3.0.11)"]
+
+[[package]]
+name = "markdown-it-py"
+version = "3.0.0"
+description = "Python port of markdown-it. Markdown parsing, done right!"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
+ {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
+]
+
+[package.dependencies]
+mdurl = ">=0.1,<1.0"
+
+[package.extras]
+benchmarking = ["psutil", "pytest", "pytest-benchmark"]
+code-style = ["pre-commit (>=3.0,<4.0)"]
+compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
+linkify = ["linkify-it-py (>=1,<3)"]
+plugins = ["mdit-py-plugins"]
+profiling = ["gprof2dot"]
+rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
+testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
[[package]]
name = "markupsafe"
-version = "2.1.5"
+version = "3.0.1"
description = "Safely add untrusted strings to HTML/XML markup."
optional = false
+python-versions = ">=3.9"
+files = [
+ {file = "MarkupSafe-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:db842712984e91707437461930e6011e60b39136c7331e971952bb30465bc1a1"},
+ {file = "MarkupSafe-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ffb4a8e7d46ed96ae48805746755fadd0909fea2306f93d5d8233ba23dda12a"},
+ {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67c519635a4f64e495c50e3107d9b4075aec33634272b5db1cde839e07367589"},
+ {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48488d999ed50ba8d38c581d67e496f955821dc183883550a6fbc7f1aefdc170"},
+ {file = "MarkupSafe-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f31ae06f1328595d762c9a2bf29dafd8621c7d3adc130cbb46278079758779ca"},
+ {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80fcbf3add8790caddfab6764bde258b5d09aefbe9169c183f88a7410f0f6dea"},
+ {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3341c043c37d78cc5ae6e3e305e988532b072329639007fd408a476642a89fd6"},
+ {file = "MarkupSafe-3.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cb53e2a99df28eee3b5f4fea166020d3ef9116fdc5764bc5117486e6d1211b25"},
+ {file = "MarkupSafe-3.0.1-cp310-cp310-win32.whl", hash = "sha256:db15ce28e1e127a0013dfb8ac243a8e392db8c61eae113337536edb28bdc1f97"},
+ {file = "MarkupSafe-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:4ffaaac913c3f7345579db4f33b0020db693f302ca5137f106060316761beea9"},
+ {file = "MarkupSafe-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:26627785a54a947f6d7336ce5963569b5d75614619e75193bdb4e06e21d447ad"},
+ {file = "MarkupSafe-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b954093679d5750495725ea6f88409946d69cfb25ea7b4c846eef5044194f583"},
+ {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:973a371a55ce9ed333a3a0f8e0bcfae9e0d637711534bcb11e130af2ab9334e7"},
+ {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:244dbe463d5fb6d7ce161301a03a6fe744dac9072328ba9fc82289238582697b"},
+ {file = "MarkupSafe-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d98e66a24497637dd31ccab090b34392dddb1f2f811c4b4cd80c230205c074a3"},
+ {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ad91738f14eb8da0ff82f2acd0098b6257621410dcbd4df20aaa5b4233d75a50"},
+ {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7044312a928a66a4c2a22644147bc61a199c1709712069a344a3fb5cfcf16915"},
+ {file = "MarkupSafe-3.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a4792d3b3a6dfafefdf8e937f14906a51bd27025a36f4b188728a73382231d91"},
+ {file = "MarkupSafe-3.0.1-cp311-cp311-win32.whl", hash = "sha256:fa7d686ed9883f3d664d39d5a8e74d3c5f63e603c2e3ff0abcba23eac6542635"},
+ {file = "MarkupSafe-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ba25a71ebf05b9bb0e2ae99f8bc08a07ee8e98c612175087112656ca0f5c8bf"},
+ {file = "MarkupSafe-3.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8ae369e84466aa70f3154ee23c1451fda10a8ee1b63923ce76667e3077f2b0c4"},
+ {file = "MarkupSafe-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40f1e10d51c92859765522cbd79c5c8989f40f0419614bcdc5015e7b6bf97fc5"},
+ {file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a4cb365cb49b750bdb60b846b0c0bc49ed62e59a76635095a179d440540c346"},
+ {file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee3941769bd2522fe39222206f6dd97ae83c442a94c90f2b7a25d847d40f4729"},
+ {file = "MarkupSafe-3.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62fada2c942702ef8952754abfc1a9f7658a4d5460fabe95ac7ec2cbe0d02abc"},
+ {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c2d64fdba74ad16138300815cfdc6ab2f4647e23ced81f59e940d7d4a1469d9"},
+ {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fb532dd9900381d2e8f48172ddc5a59db4c445a11b9fab40b3b786da40d3b56b"},
+ {file = "MarkupSafe-3.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0f84af7e813784feb4d5e4ff7db633aba6c8ca64a833f61d8e4eade234ef0c38"},
+ {file = "MarkupSafe-3.0.1-cp312-cp312-win32.whl", hash = "sha256:cbf445eb5628981a80f54087f9acdbf84f9b7d862756110d172993b9a5ae81aa"},
+ {file = "MarkupSafe-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:a10860e00ded1dd0a65b83e717af28845bb7bd16d8ace40fe5531491de76b79f"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e81c52638315ff4ac1b533d427f50bc0afc746deb949210bc85f05d4f15fd772"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:312387403cd40699ab91d50735ea7a507b788091c416dd007eac54434aee51da"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ae99f31f47d849758a687102afdd05bd3d3ff7dbab0a8f1587981b58a76152a"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c97ff7fedf56d86bae92fa0a646ce1a0ec7509a7578e1ed238731ba13aabcd1c"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7420ceda262dbb4b8d839a4ec63d61c261e4e77677ed7c66c99f4e7cb5030dd"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45d42d132cff577c92bfba536aefcfea7e26efb975bd455db4e6602f5c9f45e7"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c8817557d0de9349109acb38b9dd570b03cc5014e8aabf1cbddc6e81005becd"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a54c43d3ec4cf2a39f4387ad044221c66a376e58c0d0e971d47c475ba79c6b5"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313-win32.whl", hash = "sha256:c91b394f7601438ff79a4b93d16be92f216adb57d813a78be4446fe0f6bc2d8c"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:fe32482b37b4b00c7a52a07211b479653b7fe4f22b2e481b9a9b099d8a430f2f"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:17b2aea42a7280db02ac644db1d634ad47dcc96faf38ab304fe26ba2680d359a"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:852dc840f6d7c985603e60b5deaae1d89c56cb038b577f6b5b8c808c97580f1d"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0778de17cff1acaeccc3ff30cd99a3fd5c50fc58ad3d6c0e0c4c58092b859396"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:800100d45176652ded796134277ecb13640c1a537cad3b8b53da45aa96330453"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d06b24c686a34c86c8c1fba923181eae6b10565e4d80bdd7bc1c8e2f11247aa4"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:33d1c36b90e570ba7785dacd1faaf091203d9942bc036118fab8110a401eb1a8"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:beeebf760a9c1f4c07ef6a53465e8cfa776ea6a2021eda0d0417ec41043fe984"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bbde71a705f8e9e4c3e9e33db69341d040c827c7afa6789b14c6e16776074f5a"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313t-win32.whl", hash = "sha256:82b5dba6eb1bcc29cc305a18a3c5365d2af06ee71b123216416f7e20d2a84e5b"},
+ {file = "MarkupSafe-3.0.1-cp313-cp313t-win_amd64.whl", hash = "sha256:730d86af59e0e43ce277bb83970530dd223bf7f2a838e086b50affa6ec5f9295"},
+ {file = "MarkupSafe-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4935dd7883f1d50e2ffecca0aa33dc1946a94c8f3fdafb8df5c330e48f71b132"},
+ {file = "MarkupSafe-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e9393357f19954248b00bed7c56f29a25c930593a77630c719653d51e7669c2a"},
+ {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40621d60d0e58aa573b68ac5e2d6b20d44392878e0bfc159012a5787c4e35bc8"},
+ {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f94190df587738280d544971500b9cafc9b950d32efcb1fba9ac10d84e6aa4e6"},
+ {file = "MarkupSafe-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6a387d61fe41cdf7ea95b38e9af11cfb1a63499af2759444b99185c4ab33f5b"},
+ {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8ad4ad1429cd4f315f32ef263c1342166695fad76c100c5d979c45d5570ed58b"},
+ {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e24bfe89c6ac4c31792793ad9f861b8f6dc4546ac6dc8f1c9083c7c4f2b335cd"},
+ {file = "MarkupSafe-3.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2a4b34a8d14649315c4bc26bbfa352663eb51d146e35eef231dd739d54a5430a"},
+ {file = "MarkupSafe-3.0.1-cp39-cp39-win32.whl", hash = "sha256:242d6860f1fd9191aef5fae22b51c5c19767f93fb9ead4d21924e0bcb17619d8"},
+ {file = "MarkupSafe-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:93e8248d650e7e9d49e8251f883eed60ecbc0e8ffd6349e18550925e31bd029b"},
+ {file = "markupsafe-3.0.1.tar.gz", hash = "sha256:3e683ee4f5d0fa2dde4db77ed8dd8a876686e3fc417655c2ece9a90576905344"},
+]
+
+[[package]]
+name = "matplotlib"
+version = "3.9.2"
+description = "Python plotting package"
+optional = true
+python-versions = ">=3.9"
+files = [
+ {file = "matplotlib-3.9.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:9d78bbc0cbc891ad55b4f39a48c22182e9bdaea7fc0e5dbd364f49f729ca1bbb"},
+ {file = "matplotlib-3.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c375cc72229614632c87355366bdf2570c2dac01ac66b8ad048d2dabadf2d0d4"},
+ {file = "matplotlib-3.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d94ff717eb2bd0b58fe66380bd8b14ac35f48a98e7c6765117fe67fb7684e64"},
+ {file = "matplotlib-3.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab68d50c06938ef28681073327795c5db99bb4666214d2d5f880ed11aeaded66"},
+ {file = "matplotlib-3.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:65aacf95b62272d568044531e41de26285d54aec8cb859031f511f84bd8b495a"},
+ {file = "matplotlib-3.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:3fd595f34aa8a55b7fc8bf9ebea8aa665a84c82d275190a61118d33fbc82ccae"},
+ {file = "matplotlib-3.9.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d8dd059447824eec055e829258ab092b56bb0579fc3164fa09c64f3acd478772"},
+ {file = "matplotlib-3.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c797dac8bb9c7a3fd3382b16fe8f215b4cf0f22adccea36f1545a6d7be310b41"},
+ {file = "matplotlib-3.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d719465db13267bcef19ea8954a971db03b9f48b4647e3860e4bc8e6ed86610f"},
+ {file = "matplotlib-3.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8912ef7c2362f7193b5819d17dae8629b34a95c58603d781329712ada83f9447"},
+ {file = "matplotlib-3.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7741f26a58a240f43bee74965c4882b6c93df3e7eb3de160126d8c8f53a6ae6e"},
+ {file = "matplotlib-3.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:ae82a14dab96fbfad7965403c643cafe6515e386de723e498cf3eeb1e0b70cc7"},
+ {file = "matplotlib-3.9.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ac43031375a65c3196bee99f6001e7fa5bdfb00ddf43379d3c0609bdca042df9"},
+ {file = "matplotlib-3.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:be0fc24a5e4531ae4d8e858a1a548c1fe33b176bb13eff7f9d0d38ce5112a27d"},
+ {file = "matplotlib-3.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf81de2926c2db243c9b2cbc3917619a0fc85796c6ba4e58f541df814bbf83c7"},
+ {file = "matplotlib-3.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6ee45bc4245533111ced13f1f2cace1e7f89d1c793390392a80c139d6cf0e6c"},
+ {file = "matplotlib-3.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:306c8dfc73239f0e72ac50e5a9cf19cc4e8e331dd0c54f5e69ca8758550f1e1e"},
+ {file = "matplotlib-3.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:5413401594cfaff0052f9d8b1aafc6d305b4bd7c4331dccd18f561ff7e1d3bd3"},
+ {file = "matplotlib-3.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:18128cc08f0d3cfff10b76baa2f296fc28c4607368a8402de61bb3f2eb33c7d9"},
+ {file = "matplotlib-3.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4876d7d40219e8ae8bb70f9263bcbe5714415acfdf781086601211335e24f8aa"},
+ {file = "matplotlib-3.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d9f07a80deab4bb0b82858a9e9ad53d1382fd122be8cde11080f4e7dfedb38b"},
+ {file = "matplotlib-3.9.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7c0410f181a531ec4e93bbc27692f2c71a15c2da16766f5ba9761e7ae518413"},
+ {file = "matplotlib-3.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:909645cce2dc28b735674ce0931a4ac94e12f5b13f6bb0b5a5e65e7cea2c192b"},
+ {file = "matplotlib-3.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:f32c7410c7f246838a77d6d1eff0c0f87f3cb0e7c4247aebea71a6d5a68cab49"},
+ {file = "matplotlib-3.9.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:37e51dd1c2db16ede9cfd7b5cabdfc818b2c6397c83f8b10e0e797501c963a03"},
+ {file = "matplotlib-3.9.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b82c5045cebcecd8496a4d694d43f9cc84aeeb49fe2133e036b207abe73f4d30"},
+ {file = "matplotlib-3.9.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f053c40f94bc51bc03832a41b4f153d83f2062d88c72b5e79997072594e97e51"},
+ {file = "matplotlib-3.9.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbe196377a8248972f5cede786d4c5508ed5f5ca4a1e09b44bda889958b33f8c"},
+ {file = "matplotlib-3.9.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5816b1e1fe8c192cbc013f8f3e3368ac56fbecf02fb41b8f8559303f24c5015e"},
+ {file = "matplotlib-3.9.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:cef2a73d06601437be399908cf13aee74e86932a5ccc6ccdf173408ebc5f6bb2"},
+ {file = "matplotlib-3.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e0830e188029c14e891fadd99702fd90d317df294c3298aad682739c5533721a"},
+ {file = "matplotlib-3.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ba9c1299c920964e8d3857ba27173b4dbb51ca4bab47ffc2c2ba0eb5e2cbc5"},
+ {file = "matplotlib-3.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1cd93b91ab47a3616b4d3c42b52f8363b88ca021e340804c6ab2536344fad9ca"},
+ {file = "matplotlib-3.9.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6d1ce5ed2aefcdce11904fc5bbea7d9c21fff3d5f543841edf3dea84451a09ea"},
+ {file = "matplotlib-3.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:b2696efdc08648536efd4e1601b5fd491fd47f4db97a5fbfd175549a7365c1b2"},
+ {file = "matplotlib-3.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d52a3b618cb1cbb769ce2ee1dcdb333c3ab6e823944e9a2d36e37253815f9556"},
+ {file = "matplotlib-3.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:039082812cacd6c6bec8e17a9c1e6baca230d4116d522e81e1f63a74d01d2e21"},
+ {file = "matplotlib-3.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6758baae2ed64f2331d4fd19be38b7b4eae3ecec210049a26b6a4f3ae1c85dcc"},
+ {file = "matplotlib-3.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:050598c2b29e0b9832cde72bcf97627bf00262adbc4a54e2b856426bb2ef0697"},
+ {file = "matplotlib-3.9.2.tar.gz", hash = "sha256:96ab43906269ca64a6366934106fa01534454a69e471b7bf3d79083981aaab92"},
+]
+
+[package.dependencies]
+contourpy = ">=1.0.1"
+cycler = ">=0.10"
+fonttools = ">=4.22.0"
+kiwisolver = ">=1.3.1"
+numpy = ">=1.23"
+packaging = ">=20.0"
+pillow = ">=8"
+pyparsing = ">=2.3.1"
+python-dateutil = ">=2.7"
+
+[package.extras]
+dev = ["meson-python (>=0.13.1)", "numpy (>=1.25)", "pybind11 (>=2.6)", "setuptools (>=64)", "setuptools_scm (>=7)"]
+
+[[package]]
+name = "matplotlib-inline"
+version = "0.1.7"
+description = "Inline Matplotlib backend for Jupyter"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"},
+ {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"},
+]
+
+[package.dependencies]
+traitlets = "*"
+
+[[package]]
+name = "mdurl"
+version = "0.1.2"
+description = "Markdown URL utilities"
+optional = true
python-versions = ">=3.7"
files = [
- {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"},
- {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"},
- {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"},
- {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"},
- {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"},
- {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"},
- {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"},
- {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"},
-]
-
-[[package]]
-name = "mkl"
-version = "2021.4.0"
-description = "Intel® oneAPI Math Kernel Library"
-optional = false
-python-versions = "*"
+ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
+ {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
+]
+
+[[package]]
+name = "meshio"
+version = "5.3.5"
+description = "I/O for many mesh formats"
+optional = true
+python-versions = ">=3.8"
files = [
- {file = "mkl-2021.4.0-py2.py3-none-macosx_10_15_x86_64.macosx_11_0_x86_64.whl", hash = "sha256:67460f5cd7e30e405b54d70d1ed3ca78118370b65f7327d495e9c8847705e2fb"},
- {file = "mkl-2021.4.0-py2.py3-none-manylinux1_i686.whl", hash = "sha256:636d07d90e68ccc9630c654d47ce9fdeb036bb46e2b193b3a9ac8cfea683cce5"},
- {file = "mkl-2021.4.0-py2.py3-none-manylinux1_x86_64.whl", hash = "sha256:398dbf2b0d12acaf54117a5210e8f191827f373d362d796091d161f610c1ebfb"},
- {file = "mkl-2021.4.0-py2.py3-none-win32.whl", hash = "sha256:439c640b269a5668134e3dcbcea4350459c4a8bc46469669b2d67e07e3d330e8"},
- {file = "mkl-2021.4.0-py2.py3-none-win_amd64.whl", hash = "sha256:ceef3cafce4c009dd25f65d7ad0d833a0fbadc3d8903991ec92351fe5de1e718"},
+ {file = "meshio-5.3.5-py3-none-any.whl", hash = "sha256:0736c6e34ecc768f62f2cde5d8233a3529512a9399b25c68ea2ca0d5900cdc10"},
+ {file = "meshio-5.3.5.tar.gz", hash = "sha256:f21f01abd9f29ba06ea119304b3d39e610421cfe93b9dd23362834919f87586d"},
]
[package.dependencies]
-intel-openmp = "==2021.*"
-tbb = "==2021.*"
+numpy = ">=1.20.0"
+rich = "*"
+
+[package.extras]
+all = ["h5py", "netCDF4"]
+
+[[package]]
+name = "mistune"
+version = "3.0.2"
+description = "A sane and fast Markdown parser with useful plugins and renderers"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "mistune-3.0.2-py3-none-any.whl", hash = "sha256:71481854c30fdbc938963d3605b72501f5c10a9320ecd412c121c163a1c7d205"},
+ {file = "mistune-3.0.2.tar.gz", hash = "sha256:fc7f93ded930c92394ef2cb6f04a8aabab4117a91449e72dcc8dfa646a508be8"},
+]
[[package]]
name = "mpmath"
@@ -2017,103 +3593,108 @@ pyopengl = "*"
[[package]]
name = "multidict"
-version = "6.0.5"
+version = "6.1.0"
description = "multidict implementation"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"},
- {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"},
- {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"},
- {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"},
- {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"},
- {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"},
- {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"},
- {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"},
- {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"},
- {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"},
- {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"},
- {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"},
- {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"},
- {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"},
- {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"},
- {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"},
- {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"},
- {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"},
- {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"},
- {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"},
- {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"},
- {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"},
- {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"},
- {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"},
- {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"},
- {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"},
- {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"},
- {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"},
- {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"},
- {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"},
- {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"},
- {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"},
- {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"},
- {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"},
- {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"},
- {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"},
- {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"},
- {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"},
- {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"},
- {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"},
- {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"},
- {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"},
- {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"},
- {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"},
- {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"},
- {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"},
- {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"},
- {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"},
- {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"},
- {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"},
- {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"},
- {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"},
- {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"},
- {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"},
- {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"},
- {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"},
- {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"},
- {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"},
- {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"},
- {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"},
- {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"},
- {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"},
- {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"},
- {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"},
- {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"},
- {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"},
- {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"},
- {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"},
- {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"},
- {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"},
- {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"},
- {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"},
- {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"},
- {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"},
- {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"},
- {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"},
- {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"},
- {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"},
- {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"},
- {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"},
- {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"},
- {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"},
- {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"},
- {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"},
- {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"},
- {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"},
- {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"},
- {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"},
- {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"},
- {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"},
+ {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"},
+ {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"},
+ {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"},
+ {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"},
+ {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"},
+ {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"},
+ {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"},
+ {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"},
+ {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"},
+ {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"},
+ {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"},
+ {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"},
+ {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"},
+ {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"},
+ {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"},
+ {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"},
+ {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"},
+ {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"},
+ {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"},
+ {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"},
+ {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"},
+ {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"},
+ {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"},
+ {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"},
+ {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"},
+ {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"},
+ {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"},
+ {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"},
+ {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"},
+ {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"},
+ {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"},
+ {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"},
+ {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"},
+ {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"},
+ {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"},
+ {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"},
+ {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"},
+ {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"},
+ {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"},
+ {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"},
+ {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"},
+ {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"},
+ {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"},
+ {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"},
+ {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"},
+ {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"},
+ {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"},
+ {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"},
+ {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"},
+ {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"},
+ {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"},
+ {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"},
+ {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"},
+ {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"},
+ {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"},
+ {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"},
+ {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"},
+ {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"},
+ {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"},
+ {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"},
+ {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"},
+ {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"},
+ {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"},
+ {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"},
+ {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"},
+ {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"},
+ {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"},
+ {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"},
+ {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"},
+ {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"},
+ {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"},
+ {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"},
+ {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"},
+ {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"},
+ {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"},
+ {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"},
+ {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"},
+ {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"},
+ {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"},
+ {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"},
+ {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"},
+ {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"},
+ {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"},
+ {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"},
+ {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"},
+ {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"},
+ {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"},
+ {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"},
+ {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"},
+ {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"},
+ {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"},
+ {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"},
]
+[package.dependencies]
+typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""}
+
[[package]]
name = "multiprocess"
version = "0.70.16"
@@ -2138,22 +3719,114 @@ files = [
[package.dependencies]
dill = ">=0.3.8"
+[[package]]
+name = "nbclient"
+version = "0.10.0"
+description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor."
+optional = true
+python-versions = ">=3.8.0"
+files = [
+ {file = "nbclient-0.10.0-py3-none-any.whl", hash = "sha256:f13e3529332a1f1f81d82a53210322476a168bb7090a0289c795fe9cc11c9d3f"},
+ {file = "nbclient-0.10.0.tar.gz", hash = "sha256:4b3f1b7dba531e498449c4db4f53da339c91d449dc11e9af3a43b4eb5c5abb09"},
+]
+
+[package.dependencies]
+jupyter-client = ">=6.1.12"
+jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0"
+nbformat = ">=5.1"
+traitlets = ">=5.4"
+
+[package.extras]
+dev = ["pre-commit"]
+docs = ["autodoc-traits", "mock", "moto", "myst-parser", "nbclient[test]", "sphinx (>=1.7)", "sphinx-book-theme", "sphinxcontrib-spelling"]
+test = ["flaky", "ipykernel (>=6.19.3)", "ipython", "ipywidgets", "nbconvert (>=7.0.0)", "pytest (>=7.0,<8)", "pytest-asyncio", "pytest-cov (>=4.0)", "testpath", "xmltodict"]
+
+[[package]]
+name = "nbconvert"
+version = "7.16.4"
+description = "Converting Jupyter Notebooks (.ipynb files) to other formats. Output formats include asciidoc, html, latex, markdown, pdf, py, rst, script. nbconvert can be used both as a Python library (`import nbconvert`) or as a command line tool (invoked as `jupyter nbconvert ...`)."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "nbconvert-7.16.4-py3-none-any.whl", hash = "sha256:05873c620fe520b6322bf8a5ad562692343fe3452abda5765c7a34b7d1aa3eb3"},
+ {file = "nbconvert-7.16.4.tar.gz", hash = "sha256:86ca91ba266b0a448dc96fa6c5b9d98affabde2867b363258703536807f9f7f4"},
+]
+
+[package.dependencies]
+beautifulsoup4 = "*"
+bleach = "!=5.0.0"
+defusedxml = "*"
+jinja2 = ">=3.0"
+jupyter-core = ">=4.7"
+jupyterlab-pygments = "*"
+markupsafe = ">=2.0"
+mistune = ">=2.0.3,<4"
+nbclient = ">=0.5.0"
+nbformat = ">=5.7"
+packaging = "*"
+pandocfilters = ">=1.4.1"
+pygments = ">=2.4.1"
+tinycss2 = "*"
+traitlets = ">=5.1"
+
+[package.extras]
+all = ["flaky", "ipykernel", "ipython", "ipywidgets (>=7.5)", "myst-parser", "nbsphinx (>=0.2.12)", "playwright", "pydata-sphinx-theme", "pyqtwebengine (>=5.15)", "pytest (>=7)", "sphinx (==5.0.2)", "sphinxcontrib-spelling", "tornado (>=6.1)"]
+docs = ["ipykernel", "ipython", "myst-parser", "nbsphinx (>=0.2.12)", "pydata-sphinx-theme", "sphinx (==5.0.2)", "sphinxcontrib-spelling"]
+qtpdf = ["pyqtwebengine (>=5.15)"]
+qtpng = ["pyqtwebengine (>=5.15)"]
+serve = ["tornado (>=6.1)"]
+test = ["flaky", "ipykernel", "ipywidgets (>=7.5)", "pytest (>=7)"]
+webpdf = ["playwright"]
+
+[[package]]
+name = "nbformat"
+version = "5.10.4"
+description = "The Jupyter Notebook format"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "nbformat-5.10.4-py3-none-any.whl", hash = "sha256:3b48d6c8fbca4b299bf3982ea7db1af21580e4fec269ad087b9e81588891200b"},
+ {file = "nbformat-5.10.4.tar.gz", hash = "sha256:322168b14f937a5d11362988ecac2a4952d3d8e3a2cbeb2319584631226d5b3a"},
+]
+
+[package.dependencies]
+fastjsonschema = ">=2.15"
+jsonschema = ">=2.6"
+jupyter-core = ">=4.12,<5.0.dev0 || >=5.1.dev0"
+traitlets = ">=5.1"
+
+[package.extras]
+docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"]
+test = ["pep440", "pre-commit", "pytest", "testpath"]
+
+[[package]]
+name = "nest-asyncio"
+version = "1.6.0"
+description = "Patch asyncio to allow nested event loops"
+optional = true
+python-versions = ">=3.5"
+files = [
+ {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"},
+ {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"},
+]
+
[[package]]
name = "networkx"
-version = "3.3"
+version = "3.4"
description = "Python package for creating and manipulating graphs and networks"
optional = false
python-versions = ">=3.10"
files = [
- {file = "networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"},
- {file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"},
+ {file = "networkx-3.4-py3-none-any.whl", hash = "sha256:46dad0ec74a825a968e2b36c37ef5b91faa3868f017b2283d9cbff33112222ce"},
+ {file = "networkx-3.4.tar.gz", hash = "sha256:1269b90f8f0d3a4095f016f49650f35ac169729f49b69d0572b2bb142748162b"},
]
[package.extras]
-default = ["matplotlib (>=3.6)", "numpy (>=1.23)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"]
+default = ["matplotlib (>=3.7)", "numpy (>=1.24)", "pandas (>=2.0)", "scipy (>=1.10,!=1.11.0,!=1.11.1)"]
developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"]
-doc = ["myst-nb (>=1.0)", "numpydoc (>=1.7)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"]
-extra = ["lxml (>=4.6)", "pydot (>=2.0)", "pygraphviz (>=1.12)", "sympy (>=1.10)"]
+doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.15)", "sphinx (>=7.3)", "sphinx-gallery (>=0.16)", "texext (>=0.6.7)"]
+example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "momepy (>=0.7.2)", "osmnx (>=1.9)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"]
+extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"]
test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"]
[[package]]
@@ -2167,6 +3840,58 @@ files = [
{file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"},
]
+[[package]]
+name = "nose"
+version = "1.3.7"
+description = "nose extends unittest to make testing easier"
+optional = true
+python-versions = "*"
+files = [
+ {file = "nose-1.3.7-py2-none-any.whl", hash = "sha256:dadcddc0aefbf99eea214e0f1232b94f2fa9bd98fa8353711dacb112bfcbbb2a"},
+ {file = "nose-1.3.7-py3-none-any.whl", hash = "sha256:9ff7c6cc443f8c51994b34a667bbcf45afd6d945be7477b52e97516fd17c53ac"},
+ {file = "nose-1.3.7.tar.gz", hash = "sha256:f1bffef9cbc82628f6e7d7b40d7e255aefaa1adb6a1b1d26c69a8b79e6208a98"},
+]
+
+[[package]]
+name = "notebook"
+version = "7.2.2"
+description = "Jupyter Notebook - A web-based notebook environment for interactive computing"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "notebook-7.2.2-py3-none-any.whl", hash = "sha256:c89264081f671bc02eec0ed470a627ed791b9156cad9285226b31611d3e9fe1c"},
+ {file = "notebook-7.2.2.tar.gz", hash = "sha256:2ef07d4220421623ad3fe88118d687bc0450055570cdd160814a59cf3a1c516e"},
+]
+
+[package.dependencies]
+jupyter-server = ">=2.4.0,<3"
+jupyterlab = ">=4.2.0,<4.3"
+jupyterlab-server = ">=2.27.1,<3"
+notebook-shim = ">=0.2,<0.3"
+tornado = ">=6.2.0"
+
+[package.extras]
+dev = ["hatch", "pre-commit"]
+docs = ["myst-parser", "nbsphinx", "pydata-sphinx-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt", "sphinxcontrib-spelling"]
+test = ["importlib-resources (>=5.0)", "ipykernel", "jupyter-server[test] (>=2.4.0,<3)", "jupyterlab-server[test] (>=2.27.1,<3)", "nbval", "pytest (>=7.0)", "pytest-console-scripts", "pytest-timeout", "pytest-tornasync", "requests"]
+
+[[package]]
+name = "notebook-shim"
+version = "0.2.4"
+description = "A shim layer for notebook traits and config"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "notebook_shim-0.2.4-py3-none-any.whl", hash = "sha256:411a5be4e9dc882a074ccbcae671eda64cceb068767e9a3419096986560e1cef"},
+ {file = "notebook_shim-0.2.4.tar.gz", hash = "sha256:b4b2cfa1b65d98307ca24361f5b30fe785b53c3fd07b7a47e89acb5e6ac638cb"},
+]
+
+[package.dependencies]
+jupyter-server = ">=1.8,<3"
+
+[package.extras]
+test = ["pytest", "pytest-console-scripts", "pytest-jupyter", "pytest-tornasync"]
+
[[package]]
name = "numba"
version = "0.60.0"
@@ -2203,31 +3928,35 @@ numpy = ">=1.22,<2.1"
[[package]]
name = "numcodecs"
-version = "0.13.0"
+version = "0.13.1"
description = "A Python package providing buffer compression and transformation codecs for use in data storage and communication applications."
optional = false
python-versions = ">=3.10"
files = [
- {file = "numcodecs-0.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:56e49f68ce6aeba29f144992524c8897d94f846d02bbcc820dd29d7c5c2a073e"},
- {file = "numcodecs-0.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:17bc4b568214582f4c623700592f633f3afd920848630049c584fa1e535253ad"},
- {file = "numcodecs-0.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eed420a9c62d0a569aa94a387f93045f068ad3e7bbd787c6ce70bc5fefbaa7d9"},
- {file = "numcodecs-0.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:e7d3b9693df52eeaf978d2a56971d01cf9b4e284ae769ec764807f2087cce51d"},
- {file = "numcodecs-0.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f208a1b8b5e66c767ed043812ca74d9045e09b7b2e085d064a585c30b9efc8e7"},
- {file = "numcodecs-0.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a68368d3ce625ec76fcacd84785f6110d30a232909d5c6093a7aa25628880477"},
- {file = "numcodecs-0.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5904216811f2e9d312c23ffaad3b3d4c7442a3583d3a8bf81ca8319e9f5deb5"},
- {file = "numcodecs-0.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:208cab0f4d9cf4409e9c4a4c935e165833786614822c81dee9d865af372da9df"},
- {file = "numcodecs-0.13.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f3cf462d2357998d7f6baaa0427657b0eeda3eb79fba2b146d2d04542912a513"},
- {file = "numcodecs-0.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ac4dd5556fb126271e93bd1a02266e21b01d3617db448d70d00eec8e034506b4"},
- {file = "numcodecs-0.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:820be89729583c91601a6b35c052008cdd2665b25bfedb91b367cc155fb34ba0"},
- {file = "numcodecs-0.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:d67a859dd8a7f026829e91cb1799c26720cc9d29ee4ae0060cc7a581670abc06"},
- {file = "numcodecs-0.13.0.tar.gz", hash = "sha256:ba4fac7036ea5a078c7afe1d4dffeb9685080d42f19c9c16b12dad866703aa2e"},
+ {file = "numcodecs-0.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:96add4f783c5ce57cc7e650b6cac79dd101daf887c479a00a29bc1487ced180b"},
+ {file = "numcodecs-0.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:237b7171609e868a20fd313748494444458ccd696062f67e198f7f8f52000c15"},
+ {file = "numcodecs-0.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96e42f73c31b8c24259c5fac6adba0c3ebf95536e37749dc6c62ade2989dca28"},
+ {file = "numcodecs-0.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:eda7d7823c9282e65234731fd6bd3986b1f9e035755f7fed248d7d366bb291ab"},
+ {file = "numcodecs-0.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2eda97dd2f90add98df6d295f2c6ae846043396e3d51a739ca5db6c03b5eb666"},
+ {file = "numcodecs-0.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2a86f5367af9168e30f99727ff03b27d849c31ad4522060dde0bce2923b3a8bc"},
+ {file = "numcodecs-0.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233bc7f26abce24d57e44ea8ebeb5cd17084690b4e7409dd470fdb75528d615f"},
+ {file = "numcodecs-0.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:796b3e6740107e4fa624cc636248a1580138b3f1c579160f260f76ff13a4261b"},
+ {file = "numcodecs-0.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5195bea384a6428f8afcece793860b1ab0ae28143c853f0b2b20d55a8947c917"},
+ {file = "numcodecs-0.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3501a848adaddce98a71a262fee15cd3618312692aa419da77acd18af4a6a3f6"},
+ {file = "numcodecs-0.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da2230484e6102e5fa3cc1a5dd37ca1f92dfbd183d91662074d6f7574e3e8f53"},
+ {file = "numcodecs-0.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:e5db4824ebd5389ea30e54bc8aeccb82d514d28b6b68da6c536b8fa4596f4bca"},
+ {file = "numcodecs-0.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7a60d75179fd6692e301ddfb3b266d51eb598606dcae7b9fc57f986e8d65cb43"},
+ {file = "numcodecs-0.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3f593c7506b0ab248961a3b13cb148cc6e8355662ff124ac591822310bc55ecf"},
+ {file = "numcodecs-0.13.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80d3071465f03522e776a31045ddf2cfee7f52df468b977ed3afdd7fe5869701"},
+ {file = "numcodecs-0.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:90d3065ae74c9342048ae0046006f99dcb1388b7288da5a19b3bddf9c30c3176"},
+ {file = "numcodecs-0.13.1.tar.gz", hash = "sha256:a3cf37881df0898f3a9c0d4477df88133fe85185bffe57ba31bcc2fa207709bc"},
]
[package.dependencies]
numpy = ">=1.7"
[package.extras]
-docs = ["mock", "numpydoc", "pydata-sphinx-theme", "sphinx (<7.0.0)", "sphinx-issues"]
+docs = ["mock", "numpydoc", "pydata-sphinx-theme", "sphinx", "sphinx-issues"]
msgpack = ["msgpack"]
pcodec = ["pcodec (>=0.2.0)"]
test = ["coverage", "pytest", "pytest-cov"]
@@ -2279,6 +4008,21 @@ files = [
{file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"},
]
+[[package]]
+name = "numpy-stl"
+version = "3.1.2"
+description = "Library to make reading, writing and modifying both binary and ascii STL files easy."
+optional = true
+python-versions = ">3.6.0"
+files = [
+ {file = "numpy_stl-3.1.2-py3-none-any.whl", hash = "sha256:a55288340c837378bf44753a1c595c6823312995acda97f27ed04db4ff1d25f3"},
+ {file = "numpy_stl-3.1.2.tar.gz", hash = "sha256:72b46950dfa3642df1c7b873cfa78a548533724b907478c567db42fdf57ee3d2"},
+]
+
+[package.dependencies]
+numpy = "*"
+python-utils = ">=3.4.5"
+
[[package]]
name = "nvidia-cublas-cu12"
version = "12.1.3.1"
@@ -2325,12 +4069,13 @@ files = [
[[package]]
name = "nvidia-cudnn-cu12"
-version = "8.9.2.26"
+version = "9.1.0.70"
description = "cuDNN runtime libraries"
optional = false
python-versions = ">=3"
files = [
- {file = "nvidia_cudnn_cu12-8.9.2.26-py3-none-manylinux1_x86_64.whl", hash = "sha256:5ccb288774fdfb07a7e7025ffec286971c06d8d7b4fb162525334616d7629ff9"},
+ {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-manylinux2014_x86_64.whl", hash = "sha256:165764f44ef8c61fcdfdfdbe769d687e06374059fbb388b6c89ecb0e28793a6f"},
+ {file = "nvidia_cudnn_cu12-9.1.0.70-py3-none-win_amd64.whl", hash = "sha256:6278562929433d68365a07a4a1546c237ba2849852c0d4b2262a486e805b977a"},
]
[package.dependencies]
@@ -2401,14 +4146,14 @@ files = [
[[package]]
name = "nvidia-nvjitlink-cu12"
-version = "12.5.82"
+version = "12.6.77"
description = "Nvidia JIT LTO Library"
optional = false
python-versions = ">=3"
files = [
- {file = "nvidia_nvjitlink_cu12-12.5.82-py3-none-manylinux2014_aarch64.whl", hash = "sha256:98103729cc5226e13ca319a10bbf9433bbbd44ef64fe72f45f067cacc14b8d27"},
- {file = "nvidia_nvjitlink_cu12-12.5.82-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f9b37bc5c8cf7509665cb6ada5aaa0ce65618f2332b7d3e78e9790511f111212"},
- {file = "nvidia_nvjitlink_cu12-12.5.82-py3-none-win_amd64.whl", hash = "sha256:e782564d705ff0bf61ac3e1bf730166da66dd2fe9012f111ede5fc49b64ae697"},
+ {file = "nvidia_nvjitlink_cu12-12.6.77-py3-none-manylinux2014_aarch64.whl", hash = "sha256:3bf10d85bb1801e9c894c6e197e44dd137d2a0a9e43f8450e9ad13f2df0dd52d"},
+ {file = "nvidia_nvjitlink_cu12-12.6.77-py3-none-manylinux2014_x86_64.whl", hash = "sha256:9ae346d16203ae4ea513be416495167a0101d33d2d14935aa9c1829a3fb45142"},
+ {file = "nvidia_nvjitlink_cu12-12.6.77-py3-none-win_amd64.whl", hash = "sha256:410718cd44962bed862a31dd0318620f6f9a8b28a6291967bcfcb446a6516771"},
]
[[package]]
@@ -2437,6 +4182,66 @@ files = [
antlr4-python3-runtime = "==4.9.*"
PyYAML = ">=5.1.0"
+[[package]]
+name = "open3d"
+version = "0.18.0"
+description = "Open3D: A Modern Library for 3D Data Processing."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "open3d-0.18.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:48ee627a142a5453c4a2869b529310acb6f6b2507989cb9199c56e75796c575e"},
+ {file = "open3d-0.18.0-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:9f3df5e8e8fe514b8285d05e43a4a3d57243d42d5c1dc9212adf8f18b6ab59b4"},
+ {file = "open3d-0.18.0-cp310-cp310-manylinux_2_27_aarch64.whl", hash = "sha256:b9c8c8059cb92cd8b73c287385eeddf46195f2609ac7052302d6ac844a373dbf"},
+ {file = "open3d-0.18.0-cp310-cp310-manylinux_2_27_x86_64.whl", hash = "sha256:f649d5d58090f73a337895fb0022c7b05c00f47f704b5722b103cceba04cc870"},
+ {file = "open3d-0.18.0-cp310-cp310-win_amd64.whl", hash = "sha256:48cdf2af3051320140d198f5d3ea3a85eeb3355e7a989a835b611b16589b9646"},
+ {file = "open3d-0.18.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:b35a68b9fef3e963266db3bb15fbfef20e05787bc61192f61725fde5215f3560"},
+ {file = "open3d-0.18.0-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:2182b818dcd3290dd2ddb0021ad0453bfda99098c931d5b2fc636a341cb3ca70"},
+ {file = "open3d-0.18.0-cp311-cp311-manylinux_2_27_aarch64.whl", hash = "sha256:882f1e5039a3c1c5ec05183eb650537fd7431238b7ccb2b742ca5479f02f705b"},
+ {file = "open3d-0.18.0-cp311-cp311-manylinux_2_27_x86_64.whl", hash = "sha256:8e3d1d1900a8f4d956f6819c246c78081725b9b0888f8549d2a7a49c8daa1303"},
+ {file = "open3d-0.18.0-cp311-cp311-win_amd64.whl", hash = "sha256:2da5da6c9eb9227baee6fe98baa992233aca36b83ec9e7d4093c77e762db60e6"},
+ {file = "open3d-0.18.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:877e67237f2a97f8219870108eecf1ff447a81b0fcf1d2eacea246c9619fc55c"},
+ {file = "open3d-0.18.0-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:0ec03fcd48a939ec105896e0d02a9d006e8328c60491a0647b9a4fe5d9e4117d"},
+ {file = "open3d-0.18.0-cp38-cp38-manylinux_2_27_aarch64.whl", hash = "sha256:477ed692bafd0ed591676d78bcb898bb2b684dcaa2886befe29e1b19d38a7c6d"},
+ {file = "open3d-0.18.0-cp38-cp38-manylinux_2_27_x86_64.whl", hash = "sha256:46b9c1b900716771827b78006cfd18489b5327eabda8cd3d01e028b8173f4301"},
+ {file = "open3d-0.18.0-cp38-cp38-win_amd64.whl", hash = "sha256:d745affd0c7c765ed30ae9010abc4cfa80980b2c9f39a4f8678e8a9ef41ce089"},
+ {file = "open3d-0.18.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:cce30304dfde3b9c0fbfca21687cf0e5280bcbabe2920d2c804ab352bfe610a5"},
+ {file = "open3d-0.18.0-cp39-cp39-macosx_13_0_universal2.whl", hash = "sha256:ba5e07ca9a3ec6c70393bd2c5c707455a2e9c54209ccccca15ecf03834efd353"},
+ {file = "open3d-0.18.0-cp39-cp39-manylinux_2_27_aarch64.whl", hash = "sha256:23a3bf135c7e69d4116f54b1ff78f58846245b5e70640b291981cee9e49a53d7"},
+ {file = "open3d-0.18.0-cp39-cp39-manylinux_2_27_x86_64.whl", hash = "sha256:7d05fd6eedf75136cfbed24983da30bdfd08a6c4b1f968bf80ab84efc1fac861"},
+ {file = "open3d-0.18.0-cp39-cp39-win_amd64.whl", hash = "sha256:41be2d652f1b9feed9efb8775b29368ece0b4328ba6e90278486ff7643c6d480"},
+]
+
+[package.dependencies]
+configargparse = "*"
+dash = ">=2.6.0"
+nbformat = ">=5.7.0"
+numpy = ">=1.18.0"
+werkzeug = ">=2.2.3"
+
+[[package]]
+name = "opencv-contrib-python"
+version = "4.10.0.84"
+description = "Wrapper package for OpenCV python bindings."
+optional = true
+python-versions = ">=3.6"
+files = [
+ {file = "opencv-contrib-python-4.10.0.84.tar.gz", hash = "sha256:4a3eae0ed9cadf1abe9293a6938a25a540e2fd6d7fc308595caa5896c8b36a0c"},
+ {file = "opencv_contrib_python-4.10.0.84-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:ee4b0919026d8c533aeb69b16c6ec4a891a2f6844efaa14121bf68838753209c"},
+ {file = "opencv_contrib_python-4.10.0.84-cp37-abi3-macosx_12_0_x86_64.whl", hash = "sha256:dea80d4db73b8acccf9e16b5744bf3654f47b22745074263f0a6c10de26c5ef5"},
+ {file = "opencv_contrib_python-4.10.0.84-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:040575b69e4f3aa761676bace4e3d1b8485fbfaf77ef77b266ab6bda5a3b5e9b"},
+ {file = "opencv_contrib_python-4.10.0.84-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a261223db41f6e512d76deaf21c8fcfb4fbbcbc2de62ca7f74a05f2c9ee489ef"},
+ {file = "opencv_contrib_python-4.10.0.84-cp37-abi3-win32.whl", hash = "sha256:2a36257ec1375d1bec2a62177ea39828ff9804de6831ee39646bdc875c343cec"},
+ {file = "opencv_contrib_python-4.10.0.84-cp37-abi3-win_amd64.whl", hash = "sha256:47ec3160dae75f70e099b286d1a2e086d20dac8b06e759f60eaf867e6bdecba7"},
+]
+
+[package.dependencies]
+numpy = [
+ {version = ">=1.21.4", markers = "python_version >= \"3.10\" and platform_system == \"Darwin\" and python_version < \"3.11\""},
+ {version = ">=1.21.2", markers = "platform_system != \"Darwin\" and python_version >= \"3.10\" and python_version < \"3.11\""},
+ {version = ">=1.23.5", markers = "python_version >= \"3.11\" and python_version < \"3.12\""},
+ {version = ">=1.26.0", markers = "python_version >= \"3.12\""},
+]
+
[[package]]
name = "opencv-python"
version = "4.10.0.84"
@@ -2462,18 +4267,26 @@ numpy = [
]
[[package]]
-name = "ordered-set"
-version = "4.1.0"
-description = "An OrderedSet is a custom MutableSet that remembers its order, so that every"
+name = "orderly-set"
+version = "5.2.2"
+description = "Orderly set"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"},
- {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"},
+ {file = "orderly_set-5.2.2-py3-none-any.whl", hash = "sha256:f7a37c95a38c01cdfe41c3ffb62925a318a2286ea0a41790c057fc802aec54da"},
+ {file = "orderly_set-5.2.2.tar.gz", hash = "sha256:52a18b86aaf3f5d5a498bbdb27bf3253a4e5c57ab38e5b7a56fa00115cd28448"},
]
-[package.extras]
-dev = ["black", "mypy", "pytest"]
+[[package]]
+name = "overrides"
+version = "7.7.0"
+description = "A decorator to automatically detect mismatch when overriding a method."
+optional = true
+python-versions = ">=3.6"
+files = [
+ {file = "overrides-7.7.0-py3-none-any.whl", hash = "sha256:c7ed9d062f78b8e4c1a7b70bd8796b35ead4d9f510227ef9c5dc7626c60d7e49"},
+ {file = "overrides-7.7.0.tar.gz", hash = "sha256:55158fa3d93b98cc75299b1e67078ad9003ca27945c76162c1c0766d6f91820a"},
+]
[[package]]
name = "packaging"
@@ -2488,40 +4301,53 @@ files = [
[[package]]
name = "pandas"
-version = "2.2.2"
+version = "2.2.3"
description = "Powerful data structures for data analysis, time series, and statistics"
optional = false
python-versions = ">=3.9"
files = [
- {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"},
- {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"},
- {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"},
- {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"},
- {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"},
- {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"},
- {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"},
- {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"},
- {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"},
- {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"},
- {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"},
- {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"},
- {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"},
- {file = "pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"},
- {file = "pandas-2.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9dfde2a0ddef507a631dc9dc4af6a9489d5e2e740e226ad426a05cabfbd7c8ef"},
- {file = "pandas-2.2.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e9b79011ff7a0f4b1d6da6a61aa1aa604fb312d6647de5bad20013682d1429ce"},
- {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cb51fe389360f3b5a4d57dbd2848a5f033350336ca3b340d1c53a1fad33bcad"},
- {file = "pandas-2.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee3a87076c0756de40b05c5e9a6069c035ba43e8dd71c379e68cab2c20f16ad"},
- {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3e374f59e440d4ab45ca2fffde54b81ac3834cf5ae2cdfa69c90bc03bde04d76"},
- {file = "pandas-2.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:43498c0bdb43d55cb162cdc8c06fac328ccb5d2eabe3cadeb3529ae6f0517c32"},
- {file = "pandas-2.2.2-cp312-cp312-win_amd64.whl", hash = "sha256:d187d355ecec3629624fccb01d104da7d7f391db0311145817525281e2804d23"},
- {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"},
- {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"},
- {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"},
- {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"},
- {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"},
- {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"},
- {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"},
- {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"},
+ {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"},
+ {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"},
+ {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"},
+ {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"},
+ {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"},
+ {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"},
+ {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"},
+ {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"},
+ {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"},
+ {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"},
+ {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"},
+ {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"},
+ {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"},
+ {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"},
+ {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"},
+ {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"},
+ {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"},
+ {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"},
+ {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"},
+ {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"},
+ {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"},
+ {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"},
+ {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"},
+ {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"},
+ {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"},
+ {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"},
+ {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"},
+ {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"},
+ {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"},
+ {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"},
+ {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"},
+ {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"},
+ {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"},
+ {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"},
+ {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"},
+ {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"},
+ {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"},
+ {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"},
+ {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"},
+ {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"},
+ {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"},
+ {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"},
]
[package.dependencies]
@@ -2559,6 +4385,43 @@ sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-d
test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"]
xml = ["lxml (>=4.9.2)"]
+[[package]]
+name = "pandocfilters"
+version = "1.5.1"
+description = "Utilities for writing pandoc filters in python"
+optional = true
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+ {file = "pandocfilters-1.5.1-py2.py3-none-any.whl", hash = "sha256:93be382804a9cdb0a7267585f157e5d1731bbe5545a85b268d6f5fe6232de2bc"},
+ {file = "pandocfilters-1.5.1.tar.gz", hash = "sha256:002b4a555ee4ebc03f8b66307e287fa492e4a77b4ea14d3f934328297bb4939e"},
+]
+
+[[package]]
+name = "parso"
+version = "0.8.4"
+description = "A Python Parser"
+optional = true
+python-versions = ">=3.6"
+files = [
+ {file = "parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"},
+ {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"},
+]
+
+[package.extras]
+qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"]
+testing = ["docopt", "pytest"]
+
+[[package]]
+name = "pathlib"
+version = "1.0.1"
+description = "Object-oriented filesystem paths"
+optional = true
+python-versions = "*"
+files = [
+ {file = "pathlib-1.0.1-py3-none-any.whl", hash = "sha256:f35f95ab8b0f59e6d354090350b44a80a80635d22efdedfa84c7ad1cf0a74147"},
+ {file = "pathlib-1.0.1.tar.gz", hash = "sha256:6940718dfc3eff4258203ad5021090933e5c04707d5ca8cc9e73c94a7894ea9f"},
+]
+
[[package]]
name = "pettingzoo"
version = "1.24.3"
@@ -2584,6 +4447,20 @@ other = ["pillow (>=8.0.1)"]
sisl = ["box2d-py (==2.3.5)", "pygame (==2.3.0)", "pymunk (==6.2.0)", "scipy (>=1.4.1)"]
testing = ["AutoROM", "pre-commit", "pynput", "pytest", "pytest-cov", "pytest-markdown-docs", "pytest-xdist"]
+[[package]]
+name = "pexpect"
+version = "4.9.0"
+description = "Pexpect allows easy control of interactive console applications."
+optional = true
+python-versions = "*"
+files = [
+ {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"},
+ {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"},
+]
+
+[package.dependencies]
+ptyprocess = ">=0.5"
+
[[package]]
name = "pfzy"
version = "0.3.4"
@@ -2695,21 +4572,51 @@ tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "pa
typing = ["typing-extensions"]
xmp = ["defusedxml"]
+[[package]]
+name = "pixel-ring"
+version = "0.1.0"
+description = "respeaker series pixel ring library"
+optional = true
+python-versions = "*"
+files = [
+ {file = "pixel-ring-0.1.0.tar.gz", hash = "sha256:9480f23b58ccb912321b989d00e9d31f087f7bbcd8d970fca0fb319853d03270"},
+ {file = "pixel_ring-0.1.0-py2.py3-none-any.whl", hash = "sha256:c0fa51beb67be81b1f6ab058f651c489d69b47fb884d4361a0cf7594f093885b"},
+]
+
+[package.dependencies]
+pyusb = "*"
+spidev = "*"
+
[[package]]
name = "platformdirs"
-version = "4.2.2"
+version = "4.3.6"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
optional = false
python-versions = ">=3.8"
files = [
- {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"},
- {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"},
+ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"},
+ {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"},
]
[package.extras]
-docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"]
-type = ["mypy (>=1.8)"]
+docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"]
+type = ["mypy (>=1.11.2)"]
+
+[[package]]
+name = "plotly"
+version = "5.24.1"
+description = "An open-source, interactive data visualization library for Python"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "plotly-5.24.1-py3-none-any.whl", hash = "sha256:f67073a1e637eb0dc3e46324d9d51e2fe76e9727c892dde64ddf1e1b51f29089"},
+ {file = "plotly-5.24.1.tar.gz", hash = "sha256:dbc8ac8339d248a4bcc36e08a5659bacfe1b079390b8953533f4eb22169b4bae"},
+]
+
+[package.dependencies]
+packaging = "*"
+tenacity = ">=6.2.0"
[[package]]
name = "pluggy"
@@ -2728,13 +4635,13 @@ testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "pre-commit"
-version = "3.7.1"
+version = "4.0.1"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
optional = true
python-versions = ">=3.9"
files = [
- {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"},
- {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"},
+ {file = "pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878"},
+ {file = "pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2"},
]
[package.dependencies]
@@ -2744,38 +4651,159 @@ nodeenv = ">=0.11.1"
pyyaml = ">=5.1"
virtualenv = ">=20.10.0"
+[[package]]
+name = "prometheus-client"
+version = "0.21.0"
+description = "Python client for the Prometheus monitoring system."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "prometheus_client-0.21.0-py3-none-any.whl", hash = "sha256:4fa6b4dd0ac16d58bb587c04b1caae65b8c5043e85f778f42f5f632f6af2e166"},
+ {file = "prometheus_client-0.21.0.tar.gz", hash = "sha256:96c83c606b71ff2b0a433c98889d275f51ffec6c5e267de37c7a2b5c9aa9233e"},
+]
+
+[package.extras]
+twisted = ["twisted"]
+
[[package]]
name = "prompt-toolkit"
-version = "3.0.47"
+version = "3.0.48"
description = "Library for building powerful interactive command lines in Python"
optional = false
python-versions = ">=3.7.0"
files = [
- {file = "prompt_toolkit-3.0.47-py3-none-any.whl", hash = "sha256:0d7bfa67001d5e39d02c224b663abc33687405033a8c422d0d675a5a13361d10"},
- {file = "prompt_toolkit-3.0.47.tar.gz", hash = "sha256:1e1b29cb58080b1e69f207c893a1a7bf16d127a5c30c9d17a25a5d77792e5360"},
+ {file = "prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"},
+ {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"},
]
[package.dependencies]
wcwidth = "*"
+[[package]]
+name = "propcache"
+version = "0.2.0"
+description = "Accelerated property cache"
+optional = false
+python-versions = ">=3.8"
+files = [
+ {file = "propcache-0.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c5869b8fd70b81835a6f187c5fdbe67917a04d7e52b6e7cc4e5fe39d55c39d58"},
+ {file = "propcache-0.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:952e0d9d07609d9c5be361f33b0d6d650cd2bae393aabb11d9b719364521984b"},
+ {file = "propcache-0.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:33ac8f098df0585c0b53009f039dfd913b38c1d2edafed0cedcc0c32a05aa110"},
+ {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e48e8875e6c13909c800fa344cd54cc4b2b0db1d5f911f840458a500fde2c2"},
+ {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388f3217649d6d59292b722d940d4d2e1e6a7003259eb835724092a1cca0203a"},
+ {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f571aea50ba5623c308aa146eb650eebf7dbe0fd8c5d946e28343cb3b5aad577"},
+ {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3dfafb44f7bb35c0c06eda6b2ab4bfd58f02729e7c4045e179f9a861b07c9850"},
+ {file = "propcache-0.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3ebe9a75be7ab0b7da2464a77bb27febcb4fab46a34f9288f39d74833db7f61"},
+ {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d2f0d0f976985f85dfb5f3d685697ef769faa6b71993b46b295cdbbd6be8cc37"},
+ {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a3dc1a4b165283bd865e8f8cb5f0c64c05001e0718ed06250d8cac9bec115b48"},
+ {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9e0f07b42d2a50c7dd2d8675d50f7343d998c64008f1da5fef888396b7f84630"},
+ {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e63e3e1e0271f374ed489ff5ee73d4b6e7c60710e1f76af5f0e1a6117cd26394"},
+ {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:56bb5c98f058a41bb58eead194b4db8c05b088c93d94d5161728515bd52b052b"},
+ {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7665f04d0c7f26ff8bb534e1c65068409bf4687aa2534faf7104d7182debb336"},
+ {file = "propcache-0.2.0-cp310-cp310-win32.whl", hash = "sha256:7cf18abf9764746b9c8704774d8b06714bcb0a63641518a3a89c7f85cc02c2ad"},
+ {file = "propcache-0.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:cfac69017ef97db2438efb854edf24f5a29fd09a536ff3a992b75990720cdc99"},
+ {file = "propcache-0.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:63f13bf09cc3336eb04a837490b8f332e0db41da66995c9fd1ba04552e516354"},
+ {file = "propcache-0.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608cce1da6f2672a56b24a015b42db4ac612ee709f3d29f27a00c943d9e851de"},
+ {file = "propcache-0.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:466c219deee4536fbc83c08d09115249db301550625c7fef1c5563a584c9bc87"},
+ {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc2db02409338bf36590aa985a461b2c96fce91f8e7e0f14c50c5fcc4f229016"},
+ {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6ed8db0a556343d566a5c124ee483ae113acc9a557a807d439bcecc44e7dfbb"},
+ {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91997d9cb4a325b60d4e3f20967f8eb08dfcb32b22554d5ef78e6fd1dda743a2"},
+ {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c7dde9e533c0a49d802b4f3f218fa9ad0a1ce21f2c2eb80d5216565202acab4"},
+ {file = "propcache-0.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffcad6c564fe6b9b8916c1aefbb37a362deebf9394bd2974e9d84232e3e08504"},
+ {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:97a58a28bcf63284e8b4d7b460cbee1edaab24634e82059c7b8c09e65284f178"},
+ {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:945db8ee295d3af9dbdbb698cce9bbc5c59b5c3fe328bbc4387f59a8a35f998d"},
+ {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39e104da444a34830751715f45ef9fc537475ba21b7f1f5b0f4d71a3b60d7fe2"},
+ {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c5ecca8f9bab618340c8e848d340baf68bcd8ad90a8ecd7a4524a81c1764b3db"},
+ {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c436130cc779806bdf5d5fae0d848713105472b8566b75ff70048c47d3961c5b"},
+ {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:191db28dc6dcd29d1a3e063c3be0b40688ed76434622c53a284e5427565bbd9b"},
+ {file = "propcache-0.2.0-cp311-cp311-win32.whl", hash = "sha256:5f2564ec89058ee7c7989a7b719115bdfe2a2fb8e7a4543b8d1c0cc4cf6478c1"},
+ {file = "propcache-0.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6e2e54267980349b723cff366d1e29b138b9a60fa376664a157a342689553f71"},
+ {file = "propcache-0.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2ee7606193fb267be4b2e3b32714f2d58cad27217638db98a60f9efb5efeccc2"},
+ {file = "propcache-0.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ee8fc02ca52e24bcb77b234f22afc03288e1dafbb1f88fe24db308910c4ac7"},
+ {file = "propcache-0.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e900bad2a8456d00a113cad8c13343f3b1f327534e3589acc2219729237a2e8"},
+ {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f52a68c21363c45297aca15561812d542f8fc683c85201df0bebe209e349f793"},
+ {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e41d67757ff4fbc8ef2af99b338bfb955010444b92929e9e55a6d4dcc3c4f09"},
+ {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a64e32f8bd94c105cc27f42d3b658902b5bcc947ece3c8fe7bc1b05982f60e89"},
+ {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55346705687dbd7ef0d77883ab4f6fabc48232f587925bdaf95219bae072491e"},
+ {file = "propcache-0.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00181262b17e517df2cd85656fcd6b4e70946fe62cd625b9d74ac9977b64d8d9"},
+ {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6994984550eaf25dd7fc7bd1b700ff45c894149341725bb4edc67f0ffa94efa4"},
+ {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:56295eb1e5f3aecd516d91b00cfd8bf3a13991de5a479df9e27dd569ea23959c"},
+ {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:439e76255daa0f8151d3cb325f6dd4a3e93043e6403e6491813bcaaaa8733887"},
+ {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f6475a1b2ecb310c98c28d271a30df74f9dd436ee46d09236a6b750a7599ce57"},
+ {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3444cdba6628accf384e349014084b1cacd866fbb88433cd9d279d90a54e0b23"},
+ {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4a9d9b4d0a9b38d1c391bb4ad24aa65f306c6f01b512e10a8a34a2dc5675d348"},
+ {file = "propcache-0.2.0-cp312-cp312-win32.whl", hash = "sha256:69d3a98eebae99a420d4b28756c8ce6ea5a29291baf2dc9ff9414b42676f61d5"},
+ {file = "propcache-0.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ad9c9b99b05f163109466638bd30ada1722abb01bbb85c739c50b6dc11f92dc3"},
+ {file = "propcache-0.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ecddc221a077a8132cf7c747d5352a15ed763b674c0448d811f408bf803d9ad7"},
+ {file = "propcache-0.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0e53cb83fdd61cbd67202735e6a6687a7b491c8742dfc39c9e01e80354956763"},
+ {file = "propcache-0.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92fe151145a990c22cbccf9ae15cae8ae9eddabfc949a219c9f667877e40853d"},
+ {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a21ef516d36909931a2967621eecb256018aeb11fc48656e3257e73e2e247a"},
+ {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f88a4095e913f98988f5b338c1d4d5d07dbb0b6bad19892fd447484e483ba6b"},
+ {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a5b3bb545ead161be780ee85a2b54fdf7092815995661947812dde94a40f6fb"},
+ {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67aeb72e0f482709991aa91345a831d0b707d16b0257e8ef88a2ad246a7280bf"},
+ {file = "propcache-0.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c997f8c44ec9b9b0bcbf2d422cc00a1d9b9c681f56efa6ca149a941e5560da2"},
+ {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a66df3d4992bc1d725b9aa803e8c5a66c010c65c741ad901e260ece77f58d2f"},
+ {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:3ebbcf2a07621f29638799828b8d8668c421bfb94c6cb04269130d8de4fb7136"},
+ {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1235c01ddaa80da8235741e80815ce381c5267f96cc49b1477fdcf8c047ef325"},
+ {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3947483a381259c06921612550867b37d22e1df6d6d7e8361264b6d037595f44"},
+ {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d5bed7f9805cc29c780f3aee05de3262ee7ce1f47083cfe9f77471e9d6777e83"},
+ {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4a91d44379f45f5e540971d41e4626dacd7f01004826a18cb048e7da7e96544"},
+ {file = "propcache-0.2.0-cp313-cp313-win32.whl", hash = "sha256:f902804113e032e2cdf8c71015651c97af6418363bea8d78dc0911d56c335032"},
+ {file = "propcache-0.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:8f188cfcc64fb1266f4684206c9de0e80f54622c3f22a910cbd200478aeae61e"},
+ {file = "propcache-0.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:53d1bd3f979ed529f0805dd35ddaca330f80a9a6d90bc0121d2ff398f8ed8861"},
+ {file = "propcache-0.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:83928404adf8fb3d26793665633ea79b7361efa0287dfbd372a7e74311d51ee6"},
+ {file = "propcache-0.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77a86c261679ea5f3896ec060be9dc8e365788248cc1e049632a1be682442063"},
+ {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:218db2a3c297a3768c11a34812e63b3ac1c3234c3a086def9c0fee50d35add1f"},
+ {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7735e82e3498c27bcb2d17cb65d62c14f1100b71723b68362872bca7d0913d90"},
+ {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20a617c776f520c3875cf4511e0d1db847a076d720714ae35ffe0df3e440be68"},
+ {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67b69535c870670c9f9b14a75d28baa32221d06f6b6fa6f77a0a13c5a7b0a5b9"},
+ {file = "propcache-0.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4569158070180c3855e9c0791c56be3ceeb192defa2cdf6a3f39e54319e56b89"},
+ {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:db47514ffdbd91ccdc7e6f8407aac4ee94cc871b15b577c1c324236b013ddd04"},
+ {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:2a60ad3e2553a74168d275a0ef35e8c0a965448ffbc3b300ab3a5bb9956c2162"},
+ {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:662dd62358bdeaca0aee5761de8727cfd6861432e3bb828dc2a693aa0471a563"},
+ {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:25a1f88b471b3bc911d18b935ecb7115dff3a192b6fef46f0bfaf71ff4f12418"},
+ {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:f60f0ac7005b9f5a6091009b09a419ace1610e163fa5deaba5ce3484341840e7"},
+ {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:74acd6e291f885678631b7ebc85d2d4aec458dd849b8c841b57ef04047833bed"},
+ {file = "propcache-0.2.0-cp38-cp38-win32.whl", hash = "sha256:d9b6ddac6408194e934002a69bcaadbc88c10b5f38fb9307779d1c629181815d"},
+ {file = "propcache-0.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:676135dcf3262c9c5081cc8f19ad55c8a64e3f7282a21266d05544450bffc3a5"},
+ {file = "propcache-0.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:25c8d773a62ce0451b020c7b29a35cfbc05de8b291163a7a0f3b7904f27253e6"},
+ {file = "propcache-0.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:375a12d7556d462dc64d70475a9ee5982465fbb3d2b364f16b86ba9135793638"},
+ {file = "propcache-0.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1ec43d76b9677637a89d6ab86e1fef70d739217fefa208c65352ecf0282be957"},
+ {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f45eec587dafd4b2d41ac189c2156461ebd0c1082d2fe7013571598abb8505d1"},
+ {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc092ba439d91df90aea38168e11f75c655880c12782facf5cf9c00f3d42b562"},
+ {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa1076244f54bb76e65e22cb6910365779d5c3d71d1f18b275f1dfc7b0d71b4d"},
+ {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:682a7c79a2fbf40f5dbb1eb6bfe2cd865376deeac65acf9beb607505dced9e12"},
+ {file = "propcache-0.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e40876731f99b6f3c897b66b803c9e1c07a989b366c6b5b475fafd1f7ba3fb8"},
+ {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:363ea8cd3c5cb6679f1c2f5f1f9669587361c062e4899fce56758efa928728f8"},
+ {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:140fbf08ab3588b3468932974a9331aff43c0ab8a2ec2c608b6d7d1756dbb6cb"},
+ {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e70fac33e8b4ac63dfc4c956fd7d85a0b1139adcfc0d964ce288b7c527537fea"},
+ {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b33d7a286c0dc1a15f5fc864cc48ae92a846df287ceac2dd499926c3801054a6"},
+ {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f6d5749fdd33d90e34c2efb174c7e236829147a2713334d708746e94c4bde40d"},
+ {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22aa8f2272d81d9317ff5756bb108021a056805ce63dd3630e27d042c8092798"},
+ {file = "propcache-0.2.0-cp39-cp39-win32.whl", hash = "sha256:73e4b40ea0eda421b115248d7e79b59214411109a5bc47d0d48e4c73e3b8fcf9"},
+ {file = "propcache-0.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:9517d5e9e0731957468c29dbfd0f976736a0e55afaea843726e887f36fe017df"},
+ {file = "propcache-0.2.0-py3-none-any.whl", hash = "sha256:2ccc28197af5313706511fab3a8b66dcd6da067a1331372c82ea1cb74285e036"},
+ {file = "propcache-0.2.0.tar.gz", hash = "sha256:df81779732feb9d01e5d513fad0122efb3d53bbc75f61b2a4f29a020bc985e70"},
+]
+
[[package]]
name = "protobuf"
-version = "5.27.2"
+version = "5.28.2"
description = ""
optional = false
python-versions = ">=3.8"
files = [
- {file = "protobuf-5.27.2-cp310-abi3-win32.whl", hash = "sha256:354d84fac2b0d76062e9b3221f4abbbacdfd2a4d8af36bab0474f3a0bb30ab38"},
- {file = "protobuf-5.27.2-cp310-abi3-win_amd64.whl", hash = "sha256:0e341109c609749d501986b835f667c6e1e24531096cff9d34ae411595e26505"},
- {file = "protobuf-5.27.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a109916aaac42bff84702fb5187f3edadbc7c97fc2c99c5ff81dd15dcce0d1e5"},
- {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:176c12b1f1c880bf7a76d9f7c75822b6a2bc3db2d28baa4d300e8ce4cde7409b"},
- {file = "protobuf-5.27.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:b848dbe1d57ed7c191dfc4ea64b8b004a3f9ece4bf4d0d80a367b76df20bf36e"},
- {file = "protobuf-5.27.2-cp38-cp38-win32.whl", hash = "sha256:4fadd8d83e1992eed0248bc50a4a6361dc31bcccc84388c54c86e530b7f58863"},
- {file = "protobuf-5.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:610e700f02469c4a997e58e328cac6f305f649826853813177e6290416e846c6"},
- {file = "protobuf-5.27.2-cp39-cp39-win32.whl", hash = "sha256:9e8f199bf7f97bd7ecebffcae45ebf9527603549b2b562df0fbc6d4d688f14ca"},
- {file = "protobuf-5.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:7fc3add9e6003e026da5fc9e59b131b8f22b428b991ccd53e2af8071687b4fce"},
- {file = "protobuf-5.27.2-py3-none-any.whl", hash = "sha256:54330f07e4949d09614707c48b06d1a22f8ffb5763c159efd5c0928326a91470"},
- {file = "protobuf-5.27.2.tar.gz", hash = "sha256:f3ecdef226b9af856075f28227ff2c90ce3a594d092c39bee5513573f25e2714"},
+ {file = "protobuf-5.28.2-cp310-abi3-win32.whl", hash = "sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d"},
+ {file = "protobuf-5.28.2-cp310-abi3-win_amd64.whl", hash = "sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132"},
+ {file = "protobuf-5.28.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7"},
+ {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f"},
+ {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f"},
+ {file = "protobuf-5.28.2-cp38-cp38-win32.whl", hash = "sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0"},
+ {file = "protobuf-5.28.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3"},
+ {file = "protobuf-5.28.2-cp39-cp39-win32.whl", hash = "sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36"},
+ {file = "protobuf-5.28.2-cp39-cp39-win_amd64.whl", hash = "sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276"},
+ {file = "protobuf-5.28.2-py3-none-any.whl", hash = "sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece"},
+ {file = "protobuf-5.28.2.tar.gz", hash = "sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0"},
]
[[package]]
@@ -2807,6 +4835,31 @@ files = [
[package.extras]
test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"]
+[[package]]
+name = "ptyprocess"
+version = "0.7.0"
+description = "Run a subprocess in a pseudo terminal"
+optional = true
+python-versions = "*"
+files = [
+ {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"},
+ {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"},
+]
+
+[[package]]
+name = "pure-eval"
+version = "0.2.3"
+description = "Safely evaluate AST nodes without side effects"
+optional = true
+python-versions = "*"
+files = [
+ {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"},
+ {file = "pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"},
+]
+
+[package.extras]
+tests = ["pytest"]
+
[[package]]
name = "pyarrow"
version = "17.0.0"
@@ -2859,46 +4912,81 @@ numpy = ">=1.16.6"
test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"]
[[package]]
-name = "pyarrow-hotfix"
-version = "0.6"
-description = ""
-optional = false
-python-versions = ">=3.5"
+name = "pyaudio"
+version = "0.2.14"
+description = "Cross-platform audio I/O with PortAudio"
+optional = true
+python-versions = "*"
files = [
- {file = "pyarrow_hotfix-0.6-py3-none-any.whl", hash = "sha256:dcc9ae2d220dff0083be6a9aa8e0cdee5182ad358d4931fce825c545e5c89178"},
- {file = "pyarrow_hotfix-0.6.tar.gz", hash = "sha256:79d3e030f7ff890d408a100ac16d6f00b14d44a502d7897cd9fc3e3a534e9945"},
+ {file = "PyAudio-0.2.14-cp310-cp310-win32.whl", hash = "sha256:126065b5e82a1c03ba16e7c0404d8f54e17368836e7d2d92427358ad44fefe61"},
+ {file = "PyAudio-0.2.14-cp310-cp310-win_amd64.whl", hash = "sha256:2a166fc88d435a2779810dd2678354adc33499e9d4d7f937f28b20cc55893e83"},
+ {file = "PyAudio-0.2.14-cp311-cp311-win32.whl", hash = "sha256:506b32a595f8693811682ab4b127602d404df7dfc453b499c91a80d0f7bad289"},
+ {file = "PyAudio-0.2.14-cp311-cp311-win_amd64.whl", hash = "sha256:bbeb01d36a2f472ae5ee5e1451cacc42112986abe622f735bb870a5db77cf903"},
+ {file = "PyAudio-0.2.14-cp312-cp312-win32.whl", hash = "sha256:5fce4bcdd2e0e8c063d835dbe2860dac46437506af509353c7f8114d4bacbd5b"},
+ {file = "PyAudio-0.2.14-cp312-cp312-win_amd64.whl", hash = "sha256:12f2f1ba04e06ff95d80700a78967897a489c05e093e3bffa05a84ed9c0a7fa3"},
+ {file = "PyAudio-0.2.14-cp38-cp38-win32.whl", hash = "sha256:858caf35b05c26d8fc62f1efa2e8f53d5fa1a01164842bd622f70ddc41f55000"},
+ {file = "PyAudio-0.2.14-cp38-cp38-win_amd64.whl", hash = "sha256:2dac0d6d675fe7e181ba88f2de88d321059b69abd52e3f4934a8878e03a7a074"},
+ {file = "PyAudio-0.2.14-cp39-cp39-win32.whl", hash = "sha256:f745109634a7c19fa4d6b8b7d6967c3123d988c9ade0cd35d4295ee1acdb53e9"},
+ {file = "PyAudio-0.2.14-cp39-cp39-win_amd64.whl", hash = "sha256:009f357ee5aa6bc8eb19d69921cd30e98c42cddd34210615d592a71d09c4bd57"},
+ {file = "PyAudio-0.2.14.tar.gz", hash = "sha256:78dfff3879b4994d1f4fc6485646a57755c6ee3c19647a491f790a0895bd2f87"},
]
+[package.extras]
+test = ["numpy"]
+
[[package]]
name = "pyav"
-version = "12.2.0"
+version = "13.1.0"
description = "Pythonic bindings for FFmpeg's libraries."
optional = false
python-versions = ">=3.10"
files = [
- {file = "pyav-12.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f00df6661b56271bc21b53397f7b5cdffe3450723cd358eb69c8f6428f17500"},
- {file = "pyav-12.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bfbd521329be96f9e741ec25eccea19bb3a6c7ef5ee540f318fb1a38b5e690f0"},
- {file = "pyav-12.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84ffc7b8f4bcb671a73cb4d98d6bde9f7b4a6dabc402d8004f633375fe75cd15"},
- {file = "pyav-12.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd6390b2834d6bc6cf39c72a8b461fe0d42993dac7c6739af7e41033e1a55d5"},
- {file = "pyav-12.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:ace6a4de2f11e1dd8d0e5a5517ec2ccd3fe21ce7b1215aa9ec417e129897c84c"},
- {file = "pyav-12.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d9806ec843ac216eddbd8127c2f0fc8a0f78f0edb6fb5ea06bdb8ec1226762e7"},
- {file = "pyav-12.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c2889639e228abae0e7642768ceaefa30a87fe442b79e79a1709b531df7fded1"},
- {file = "pyav-12.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d50626b600abaa849b547600445538e3ec8a39e51ea20550d2ac5ef21281caa1"},
- {file = "pyav-12.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57bf6e52b59099b25b16e3778c241c607c6b142dcceb17b9f04ddea1b53d9c37"},
- {file = "pyav-12.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:d5463692bc9f92540f2d51d5851aae9b03a26a384d8ea62e02e942968c31bd6b"},
- {file = "pyav-12.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:796bc264e5a6cb4a3f052eda95c57891a8df60c9bbf6f9cd4fad6a1a6f3fb0a1"},
- {file = "pyav-12.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3237e76fef91aa132cff3581c28188b18bad3da07bf228eda573a683acab7111"},
- {file = "pyav-12.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52dc979a315e40081bb1816d1f265507ee62569282718d7d602e508c3daaf034"},
- {file = "pyav-12.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6175dc56d14b04b26dd36e5d12e7bc2961fa365b6e2669d2f900b2f860fdb243"},
- {file = "pyav-12.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:5ce8e1219e84a71d776ef25f4fda1c7bbeed517339db7f9c7e1f99e6f96f0a2b"},
- {file = "pyav-12.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f7a3f91ec889e44e4f246841c433f4a5e9c20a308015d1d6ff9d94ae182eabc3"},
- {file = "pyav-12.2.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f1fd8e5a1d5ca3dc377400fec2b04c15ed7abf657cc9ed1522b2481bc4f31af2"},
- {file = "pyav-12.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8561a399a7dfa30bc99b34a1a72790128593798d741423984389f31aa911d799"},
- {file = "pyav-12.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0938f1e26e3ad55b6ffe588255a8b4e306d9470fbf2cc6819644b75f10fcb7d4"},
- {file = "pyav-12.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:8b23bc2c36f84a924ad8407da5a287b2f1533af94bb42007331bc2ebab429a83"},
- {file = "pyav-12.2.0.tar.gz", hash = "sha256:6e77f8509c284e972fdac8221dc3f046f9dd69651f4218249d0910c71e041ae2"},
+ {file = "pyav-13.1.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:64a81022e60dfba7dee9767a6fd150f42293855ea127979b2f38a3fd86f908fd"},
+ {file = "pyav-13.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3971089334cc91e331c5014c8ea5fcbca0ccc82eb14952c128ce50570010a3cf"},
+ {file = "pyav-13.1.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:154394ba47b4b55d4abda3e66e2b0a79e7b046c983191cb6113ea14769eea53a"},
+ {file = "pyav-13.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b48efcde03b9952ece3c8a8d9d74c685ff84ab91b60ea0ae6960638e30f3f31"},
+ {file = "pyav-13.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:8404d5a5eef975862a35f2338ab8e7ae5d7a7f9af1ac748edef2aca4543f44cd"},
+ {file = "pyav-13.1.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:a75d67dc80ea87f3987fafa5699410047af818b20691046c76d12e18faf3da68"},
+ {file = "pyav-13.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4571175c8511d36128e94955b8cc64b0452e16da42c81ceae745946f88abf477"},
+ {file = "pyav-13.1.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d7e1bd1157b21ca116c71696be62cd12bcaefc32179fd99efad90e0a76d300d3"},
+ {file = "pyav-13.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:126386f2f8a0c57487a3ad947ac573385d41326b5ff111783135cc56a8869261"},
+ {file = "pyav-13.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:57d94282ffd445ab055c36e150fee1a4a066e0aee259260c82792dbd349ec08d"},
+ {file = "pyav-13.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b2daf24cbc01ee666c4893e69aac8fc65bab598ea0029382857930f652a5e5ff"},
+ {file = "pyav-13.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83e3a67e2038b8cfd1d5dd2d1a1756ac1143a4c223b1723e64ac8bdb2045fb6a"},
+ {file = "pyav-13.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24de515886366b2c952e3827e7fb6466ad06f40b5cb34595a3f922899727be2b"},
+ {file = "pyav-13.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66befb4172facfaaf7f3be94b1659051378b0741f087d5b46d2a25b6bce34b4f"},
+ {file = "pyav-13.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:a65d060fceee59e5a1dd70e64bf6ffca55fff2b596af906b206d8ba0057bbdc5"},
+ {file = "pyav-13.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8450899845220a2a4f3ecc3eba0d5f864c169d98a9892be75447e59480162a09"},
+ {file = "pyav-13.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6b21df5daadbb019c4612cc89923202ad7a4dd259be905eba56887a14a344861"},
+ {file = "pyav-13.1.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:21668b5ea9c4f046f61193a555d3deb2ca633b2ffb27a22a3b0eb03e8da64992"},
+ {file = "pyav-13.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ae2413955b7d76826d214d3a5b719714f352de7de318e45275811fa07b9efe3"},
+ {file = "pyav-13.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a3ba8764bbf958e6c94b0dc7b07f670b4a759a157547a69cddc58eabba8aea1d"},
+ {file = "pyav-13.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c92ef209e12660c6a75f81c9d228adc1e07294b875bf91d9b2a58c44a728b2d3"},
+ {file = "pyav-13.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2e1855824313c17367c5ba658cf99d8b3169e0c3e0bdef5aa87a4c472c46d72b"},
+ {file = "pyav-13.1.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c7a2eb79af1d3414509e31631a1b837b011eba4a21e311ae1308eca95a9f4db"},
+ {file = "pyav-13.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69566d6b5438259e2e4adc2975591d513b7f1280fbf4ed3e0901be10a4567470"},
+ {file = "pyav-13.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2960397dd97d7462effe8e6696557a91f24c2841edf391b0355734db8e4b02cd"},
+ {file = "pyav-13.1.0.tar.gz", hash = "sha256:7049f4df6f94b4b727c1339a094f29c4178f3e0c290a01b9fcf0190a9890704c"},
+]
+
+[[package]]
+name = "pycollada"
+version = "0.8"
+description = "python library for reading and writing collada documents"
+optional = true
+python-versions = "*"
+files = [
+ {file = "pycollada-0.8.tar.gz", hash = "sha256:f3a3759cc4cec1d59e932aad74399dbcf541d18862aad903c770040da42af20e"},
]
+[package.dependencies]
+numpy = "*"
+python-dateutil = ">=2.2"
+
+[package.extras]
+prettyprint = ["lxml"]
+validation = ["lxml"]
+
[[package]]
name = "pycparser"
version = "2.22"
@@ -2912,70 +5000,98 @@ files = [
[[package]]
name = "pygame"
-version = "2.6.0"
+version = "2.6.1"
description = "Python Game Development"
optional = true
python-versions = ">=3.6"
files = [
- {file = "pygame-2.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5707aa9d029752495b3eddc1edff62e0e390a02f699b0f1ce77fe0b8c70ea4f"},
- {file = "pygame-2.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d3ed0547368733b854c0d9981c982a3cdfabfa01b477d095c57bf47f2199da44"},
- {file = "pygame-2.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6050f3e95f1f16602153d616b52619c6a2041cee7040eb529f65689e9633fc3e"},
- {file = "pygame-2.6.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89be55b7e9e22e0eea08af9d6cfb97aed5da780f0b3a035803437d481a16d972"},
- {file = "pygame-2.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d65fb222eea1294cfc8206d9e5754d476a1673eb2783c03c4f70e0455320274"},
- {file = "pygame-2.6.0-cp310-cp310-win32.whl", hash = "sha256:71eebb9803cb350298de188fb7cdd3ebf13299f78d59a71c7e81efc649aae348"},
- {file = "pygame-2.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:1551852a2cd5b4139a752888f6cbeeb4a96fc0fe6e6f3f8b9d9784eb8fceab13"},
- {file = "pygame-2.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6e5e6c010b1bf429388acf4d41d7ab2f7ad8fbf241d0db822102d35c9a2eb84"},
- {file = "pygame-2.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:99902f4a2f6a338057200d99b5120a600c27a9f629ca012a9b0087c045508d08"},
- {file = "pygame-2.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a284664978a1989c1e31a0888b2f70cfbcbafdfa3bb310e750b0d3366416225"},
- {file = "pygame-2.6.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:829623cee298b3dbaa1dd9f52c3051ae82f04cad7708c8c67cb9a1a4b8fd3c0b"},
- {file = "pygame-2.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6acf7949ed764487d51123f4f3606e8f76b0df167fef12ef73ef423c35fdea39"},
- {file = "pygame-2.6.0-cp311-cp311-win32.whl", hash = "sha256:3f809560c99bd1fb4716610eca0cd36412528f03da1a63841a347b71d0c604ee"},
- {file = "pygame-2.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6897ab87f9193510a774a3483e00debfe166f340ca159f544ef99807e2a44ec4"},
- {file = "pygame-2.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b834711ebc8b9d0c2a5f9bfae4403dd277b2c61bcb689e1aa630d01a1ebcf40a"},
- {file = "pygame-2.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b5ac288655e8a31a303cc286e79cc57979ed2ba19c3a14042d4b6391c1d3bed2"},
- {file = "pygame-2.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d666667b7826b0a7921b8ce0a282ba5281dfa106976c1a3b24e32a0af65ad3b1"},
- {file = "pygame-2.6.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd8848a37a7cee37854c7efb8d451334477c9f8ce7ac339c079e724dc1334a76"},
- {file = "pygame-2.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:315e7b3c1c573984f549ac5da9778ac4709b3b4e3a4061050d94eab63fa4fe31"},
- {file = "pygame-2.6.0-cp312-cp312-win32.whl", hash = "sha256:e44bde0840cc21a91c9d368846ac538d106cf0668be1a6030f48df139609d1e8"},
- {file = "pygame-2.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:1c429824b1f881a7a5ce3b5c2014d3d182aa45a22cea33c8347a3971a5446907"},
- {file = "pygame-2.6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b832200bd8b6fc485e087bf3ef7ec1a21437258536413a5386088f5dcd3a9870"},
- {file = "pygame-2.6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:098029d01a46ea4e30620dfb7c28a577070b456c8fc96350dde05f85c0bf51b5"},
- {file = "pygame-2.6.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a858bbdeac5ec473ec9e726c55fb8fbdc2f4aad7c55110e899883738071c7c9b"},
- {file = "pygame-2.6.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f908762941fd99e1f66d1211d26383184f6045c45673443138b214bf48a89aa"},
- {file = "pygame-2.6.0-cp36-cp36m-win32.whl", hash = "sha256:4a63daee99d050f47d6ec7fa7dbd1c6597b8f082cdd58b6918d382d2bc31262d"},
- {file = "pygame-2.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:ace471b3849d68968e5427fc01166ef5afaf552a5c442fc2c28d3b7226786f55"},
- {file = "pygame-2.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fea019713d0c89dfd5909225aa933010100035d1cd30e6c936e8b6f00529fb80"},
- {file = "pygame-2.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:249dbf2d51d9f0266009a380ccf0532e1a57614a1528bb2f89a802b01d61f93e"},
- {file = "pygame-2.6.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb51533ee3204e8160600b0de34eaad70eb913a182c94a7777b6051e8fc52f1"},
- {file = "pygame-2.6.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f637636a44712e94e5601ec69160a080214626471983dfb0b5b68aa0c61563d"},
- {file = "pygame-2.6.0-cp37-cp37m-win32.whl", hash = "sha256:e432156b6f346f4cc6cab03ce9657600093390f4c9b10bf458716b25beebfe33"},
- {file = "pygame-2.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a0194652db7874bdde7dfc69d659ca954544c012e04ae527151325bfb970f423"},
- {file = "pygame-2.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eae3ee62cc172e268121d5bd9dc406a67094d33517de3a91de3323d6ae23eb02"},
- {file = "pygame-2.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f6a58b0a5a8740a3c2cf6fc5366888bd4514561253437f093c12a9ab4fb3ecae"},
- {file = "pygame-2.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c71da36997dc7b9b4ee973fa3a5d4a6cfb2149161b5b1c08b712d2f13a63ccfe"},
- {file = "pygame-2.6.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b86771801a7fc10d9a62218f27f1d5c13341c3a27394aa25578443a9cd199830"},
- {file = "pygame-2.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4928f3acf5a9ce5fbab384c21f1245304535ffd5fb167ae92a6b4d3cdb55a3b6"},
- {file = "pygame-2.6.0-cp38-cp38-win32.whl", hash = "sha256:4faab2df9926c4d31215986536b112f0d76f711cf02f395805f1ff5df8fd55fc"},
- {file = "pygame-2.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:afbb8d97aed93dfb116fe105603dacb68f8dab05b978a40a9e4ab1b6c1f683fd"},
- {file = "pygame-2.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d11f3646b53819892f4a731e80b8589a9140343d0d4b86b826802191b241228c"},
- {file = "pygame-2.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5ef92ed93c354eabff4b85e457d4d6980115004ec7ff52a19fd38b929c3b80fb"},
- {file = "pygame-2.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc1795f2e36302882546faacd5a0191463c4f4ae2b90e7c334a7733aa4190d2"},
- {file = "pygame-2.6.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e92294fcc85c4955fe5bc6a0404e4cc870808005dc8f359e881544e3cc214108"},
- {file = "pygame-2.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0cb7bdf3ee0233a3ac02ef777c01dfe315e6d4670f1312c83b91c1ef124359a"},
- {file = "pygame-2.6.0-cp39-cp39-win32.whl", hash = "sha256:ac906478ae489bb837bf6d2ae1eb9261d658aa2c34fa5b283027a04149bda81a"},
- {file = "pygame-2.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:92cf12a9722f6f0bdc5520d8925a8f085cff9c054a2ea462fc409cba3781be27"},
- {file = "pygame-2.6.0-pp36-pypy36_pp73-win32.whl", hash = "sha256:a6636f452fdaddf604a060849feb84c056930b6a3c036214f607741f16aac942"},
- {file = "pygame-2.6.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dc242dc15d067d10f25c5b12a1da48ca9436d8e2d72353eaf757e83612fba2f"},
- {file = "pygame-2.6.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f82df23598a281c8c342d3c90be213c8fe762a26c15815511f60d0aac6e03a70"},
- {file = "pygame-2.6.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2ed2539bb6bd211fc570b1169dc4a64a74ec5cd95741e62a0ab46bd18fe08e0d"},
- {file = "pygame-2.6.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:904aaf29710c6b03a7e1a65b198f5467ed6525e8e60bdcc5e90ff8584c1d54ea"},
- {file = "pygame-2.6.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fcd28f96f0fffd28e71a98773843074597e10d7f55a098e2e5bcb2bef1bdcbf5"},
- {file = "pygame-2.6.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4fad1ab33443ecd4f958dbbb67fc09fcdc7a37e26c34054e3296fb7e26ad641e"},
- {file = "pygame-2.6.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e909186d4d512add39b662904f0f79b73028fbfc4fbfdaf6f9412aed4e500e9c"},
- {file = "pygame-2.6.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79abcbf6d12fce51a955a0652ccd50b6d0a355baa27799535eaf21efb43433dd"},
- {file = "pygame-2.6.0.tar.gz", hash = "sha256:722d33ae676aa8533c1f955eded966411298831346b8d51a77dad22e46ba3e35"},
+ {file = "pygame-2.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9beeb647e555afb5657111fa83acb74b99ad88761108eaea66472e8b8547b55b"},
+ {file = "pygame-2.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:10e3d2a55f001f6c0a6eb44aa79ea7607091c9352b946692acedb2ac1482f1c9"},
+ {file = "pygame-2.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:816e85000c5d8b02a42b9834f761a5925ef3377d2924e3a7c4c143d2990ce5b8"},
+ {file = "pygame-2.6.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a78fd030d98faab4a8e27878536fdff7518d3e062a72761c552f624ebba5a5f"},
+ {file = "pygame-2.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da3ad64d685f84a34ebe5daacb39fff14f1251acb34c098d760d63fee768f50c"},
+ {file = "pygame-2.6.1-cp310-cp310-win32.whl", hash = "sha256:9dd5c054d4bd875a8caf978b82672f02bec332f52a833a76899220c460bb4b58"},
+ {file = "pygame-2.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:00827aba089355925902d533f9c41e79a799641f03746c50a374dc5c3362e43d"},
+ {file = "pygame-2.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:20349195326a5e82a16e351ed93465a7845a7e2a9af55b7bc1b2110ea3e344e1"},
+ {file = "pygame-2.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f3935459109da4bb0b3901da9904f0a3e52028a3332a355d298b1673a334cf21"},
+ {file = "pygame-2.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c31dbdb5d0217f32764797d21c2752e258e5fb7e895326538d82b5f75a0cd856"},
+ {file = "pygame-2.6.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:173badf82fa198e6888017bea40f511cb28e69ecdd5a72b214e81e4dcd66c3b1"},
+ {file = "pygame-2.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce8cc108b92de9b149b344ad2e25eedbe773af0dc41dfb24d1f07f679b558c60"},
+ {file = "pygame-2.6.1-cp311-cp311-win32.whl", hash = "sha256:811e7b925146d8149d79193652cbb83e0eca0aae66476b1cb310f0f4226b8b5c"},
+ {file = "pygame-2.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:91476902426facd4bb0dad4dc3b2573bc82c95c71b135e0daaea072ed528d299"},
+ {file = "pygame-2.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4ee7f2771f588c966fa2fa8b829be26698c9b4836f82ede5e4edc1a68594942e"},
+ {file = "pygame-2.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c8040ea2ab18c6b255af706ec01355c8a6b08dc48d77fd4ee783f8fc46a843bf"},
+ {file = "pygame-2.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47a6938de93fa610accd4969e638c2aebcb29b2fca518a84c3a39d91ab47116"},
+ {file = "pygame-2.6.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33006f784e1c7d7e466fcb61d5489da59cc5f7eb098712f792a225df1d4e229d"},
+ {file = "pygame-2.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1206125f14cae22c44565c9d333607f1d9f59487b1f1432945dfc809aeaa3e88"},
+ {file = "pygame-2.6.1-cp312-cp312-win32.whl", hash = "sha256:84fc4054e25262140d09d39e094f6880d730199710829902f0d8ceae0213379e"},
+ {file = "pygame-2.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:3a9e7396be0d9633831c3f8d5d82dd63ba373ad65599628294b7a4f8a5a01a65"},
+ {file = "pygame-2.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ae6039f3a55d800db80e8010f387557b528d34d534435e0871326804df2a62f2"},
+ {file = "pygame-2.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2a3a1288e2e9b1e5834e425bedd5ba01a3cd4902b5c2bff8ed4a740ccfe98171"},
+ {file = "pygame-2.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27eb17e3dc9640e4b4683074f1890e2e879827447770470c2aba9f125f74510b"},
+ {file = "pygame-2.6.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c1623180e70a03c4a734deb9bac50fc9c82942ae84a3a220779062128e75f3b"},
+ {file = "pygame-2.6.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef07c0103d79492c21fced9ad68c11c32efa6801ca1920ebfd0f15fb46c78b1c"},
+ {file = "pygame-2.6.1-cp313-cp313-win32.whl", hash = "sha256:3acd8c009317190c2bfd81db681ecef47d5eb108c2151d09596d9c7ea9df5c0e"},
+ {file = "pygame-2.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:813af4fba5d0b2cb8e58f5d95f7910295c34067dcc290d34f1be59c48bd1ea6a"},
+ {file = "pygame-2.6.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:56ffca6059b165bbf64f4b4be23b8068f6a0e220780e4f96ec0bb5ac3c63ec39"},
+ {file = "pygame-2.6.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3bede70ec708057e305815d6546012669226d1d80566785feca9b044216062e7"},
+ {file = "pygame-2.6.1-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f84f15d146d6aa93254008a626c56ef96fed276006202881a47b29757f0cd65a"},
+ {file = "pygame-2.6.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14f9dda45469b254c0f15edaaeaa85d2cc072ff6a83584a265f5d684c7f7efd8"},
+ {file = "pygame-2.6.1-cp36-cp36m-win32.whl", hash = "sha256:28b43190436037e428a5be28fc80cf6615304fd528009f2c688cc828f4ff104b"},
+ {file = "pygame-2.6.1-cp36-cp36m-win_amd64.whl", hash = "sha256:a4b8f04fceddd9a3ac30778d11f0254f59efcd1c382d5801271113cea8b4f2f3"},
+ {file = "pygame-2.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a620883d589926f157b8f1d1f543183ac52e5c30507dea445e3927ae0bee1c54"},
+ {file = "pygame-2.6.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b46e68cd168f44d0224c670bb72186688fc692d7079715f79d04096757d703d0"},
+ {file = "pygame-2.6.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c0b11356ac96261162d54a2c2b41a41978f00525631b01ec9c4fe26b01c66595"},
+ {file = "pygame-2.6.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:325a84d072d52e3c2921eff02f87c6a74b7e77d71db3bdf53801c6c975f1b6c4"},
+ {file = "pygame-2.6.1-cp37-cp37m-win32.whl", hash = "sha256:2a615d78b2364e86f541458ff41c2a46181b9a1e9eabd97b389282fdf04efbb3"},
+ {file = "pygame-2.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:94afd1177680d92f9214c54966ad3517d18210c4fbc5d84a0192d218e93647e0"},
+ {file = "pygame-2.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97ac4e13847b6b293ecaffa5ffce9886c98d09c03309406931cc592f0cea6366"},
+ {file = "pygame-2.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d1a7f2b66ac2e4c9583b6d4c6d6f346fb10a3392c04163f537061f86a448ed5c"},
+ {file = "pygame-2.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac3f033d2be4a9e23660a96afe2986df3a6916227538a6a0061bc218c5088507"},
+ {file = "pygame-2.6.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1bf7ab5311bbced70320f1a56701650b4c18231343ae5af42111eea91e0949a"},
+ {file = "pygame-2.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21160d9093533eb831f1b708e630706e5ac16b30750571ec27bc3b8364814f38"},
+ {file = "pygame-2.6.1-cp38-cp38-win32.whl", hash = "sha256:7bffdd3eaf394d9645331d1c3a5df9d782ebcc3c5a78f3b657c7879a828dd111"},
+ {file = "pygame-2.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:818b4eaec9c4acb6ac64805d4ca8edd4062bebca77bd815c18739fe2842c97e9"},
+ {file = "pygame-2.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15efaa11a80a65dd589a95bebe812fa5bfc7e14946b638a424c5bd9ac6cca1a4"},
+ {file = "pygame-2.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:481cfe1bdbb7fe00acc5950c494c26f00240888619bdc396fc8c39a734797432"},
+ {file = "pygame-2.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d09fd950725d187aa5207c0cb8eb9ab0d2f8ce9ab8d189c30eeb470e71b617e"},
+ {file = "pygame-2.6.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:163e66de169bd5670c86e27d0b74aad0d2d745e3b63cf4e7eb5b2bff1231ca8d"},
+ {file = "pygame-2.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb6e8d0547f30ddc845f4fd1e33070ef548233ad0dbf21f7ecea768883d1bbdc"},
+ {file = "pygame-2.6.1-cp39-cp39-win32.whl", hash = "sha256:d29eb9a93f12aa3d997b6e3c447ac85b2a4b142ab2548441523a8fcf5e216042"},
+ {file = "pygame-2.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:6582aa71a681e02e55d43150a9ab41394e6bf4d783d2962a10aea58f424be060"},
+ {file = "pygame-2.6.1-pp36-pypy36_pp73-win32.whl", hash = "sha256:4a8ea113b1bf627322a025a1a5a87e3818a7f55ab3a4077ff1ae5c8c60576614"},
+ {file = "pygame-2.6.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b7f9f8e6f76de36f4725175d686601214af362a4f30614b4dae2240198e72e6f"},
+ {file = "pygame-2.6.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:bbb7167c92103a2091366e9af26d4914ba3776666e8677d3c93551353fffa626"},
+ {file = "pygame-2.6.1-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17498a2b043bc0e795faedef1b081199c688890200aef34991c1941caa2d2c89"},
+ {file = "pygame-2.6.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7103c60939bbc1e05cfc7ba3f1d2ad3bbf103b7828b82a7166a9ab6f51950146"},
+ {file = "pygame-2.6.1.tar.gz", hash = "sha256:56fb02ead529cee00d415c3e007f75e0780c655909aaa8e8bf616ee09c9feb1f"},
+]
+
+[[package]]
+name = "pyglet"
+version = "2.0.18"
+description = "pyglet is a cross-platform games and multimedia package."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "pyglet-2.0.18-py3-none-any.whl", hash = "sha256:e592952ae0297e456c587b6486ed8c3e5f9d0c3519d517bb92dde5fdf4c26b41"},
+ {file = "pyglet-2.0.18.tar.gz", hash = "sha256:7cf9238d70082a2da282759679f8a011cc979753a32224a8ead8ed80e48f99dc"},
+]
+
+[[package]]
+name = "pygments"
+version = "2.18.0"
+description = "Pygments is a syntax highlighting package written in Python."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
+ {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
]
+[package.extras]
+windows-terminal = ["colorama (>=0.4.6)"]
+
[[package]]
name = "pymunk"
version = "6.8.1"
@@ -3182,13 +5298,13 @@ files = [
[[package]]
name = "pyparsing"
-version = "3.1.2"
+version = "3.1.4"
description = "pyparsing module - Classes and methods to define and execute parsing grammars"
optional = true
python-versions = ">=3.6.8"
files = [
- {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"},
- {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"},
+ {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"},
+ {file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"},
]
[package.extras]
@@ -3217,6 +5333,64 @@ files = [
{file = "pyrealsense2-2.55.1.6486-cp39-cp39-win_amd64.whl", hash = "sha256:5cbede3cd35946f3051ae6df42619ea01419c58379533c596bbad5dbf648c25b"},
]
+[[package]]
+name = "pyrender"
+version = "0.1.45"
+description = "Easy-to-use Python renderer for 3D visualization"
+optional = true
+python-versions = "*"
+files = []
+develop = false
+
+[package.dependencies]
+freetype-py = "*"
+imageio = "*"
+networkx = "*"
+numpy = "*"
+Pillow = "*"
+pyglet = ">=1.4.10"
+PyOpenGL = ">=3.1.0,<3.2.0"
+scipy = "*"
+six = "*"
+trimesh = "*"
+
+[package.extras]
+dev = ["flake8", "pre-commit", "pytest", "pytest-cov", "tox"]
+docs = ["sphinx", "sphinx-automodapi", "sphinx-rtd-theme"]
+
+[package.source]
+type = "git"
+url = "https://github.com/mmatl/pyrender.git"
+reference = "HEAD"
+resolved_reference = "a59963ef890891656fd17c90e12d663233dcaa99"
+
+[[package]]
+name = "pyribbit"
+version = "0.1.46"
+description = "Easy-to-use Python renderer for 3D visualization"
+optional = true
+python-versions = "*"
+files = [
+ {file = "pyribbit-0.1.46-py3-none-any.whl", hash = "sha256:0d4943f7cc6903f20ef42787e9357d7bb25c95f2c04da9dfa1a8021bdf9e0ab6"},
+ {file = "pyribbit-0.1.46.tar.gz", hash = "sha256:3bb7a31841549ed74c50e31415738d2494b720df825cf387501f17102299940b"},
+]
+
+[package.dependencies]
+freetype-py = "*"
+imageio = "*"
+networkx = "*"
+numpy = "*"
+Pillow = "*"
+pyglet = ">=1.4.10"
+PyOpenGL = ">=3.1.0"
+scipy = "*"
+six = "*"
+trimesh = "*"
+
+[package.extras]
+dev = ["flake8", "pre-commit", "pytest", "pytest-cov", "tox"]
+docs = ["sphinx", "sphinx-automodapi", "sphinx-rtd-theme"]
+
[[package]]
name = "pyserial"
version = "3.5"
@@ -3245,13 +5419,13 @@ files = [
[[package]]
name = "pytest"
-version = "8.2.2"
+version = "8.3.3"
description = "pytest: simple powerful testing with Python"
optional = true
python-versions = ">=3.8"
files = [
- {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"},
- {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"},
+ {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"},
+ {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"},
]
[package.dependencies]
@@ -3259,7 +5433,7 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
-pluggy = ">=1.5,<2.0"
+pluggy = ">=1.5,<2"
tomli = {version = ">=1", markers = "python_version < \"3.11\""}
[package.extras]
@@ -3297,6 +5471,36 @@ files = [
[package.dependencies]
six = ">=1.5"
+[[package]]
+name = "python-json-logger"
+version = "2.0.7"
+description = "A python library adding a json log formatter"
+optional = true
+python-versions = ">=3.6"
+files = [
+ {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"},
+ {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"},
+]
+
+[[package]]
+name = "python-utils"
+version = "3.9.0"
+description = "Python Utils is a module with some convenient utilities not included with the standard Python install"
+optional = true
+python-versions = ">3.9.0"
+files = [
+ {file = "python_utils-3.9.0-py2.py3-none-any.whl", hash = "sha256:a7719a5ef4bae7360d2a15c13b08c4e3c3e39b9df19bd16f119ff8d0cfeaafb7"},
+ {file = "python_utils-3.9.0.tar.gz", hash = "sha256:3689556884e3ae53aec5a4c9f17b36e752a3e93a7ba2768c6553fc4dd6fa70ef"},
+]
+
+[package.dependencies]
+typing-extensions = ">3.10.0.2"
+
+[package.extras]
+docs = ["mock", "python-utils", "sphinx"]
+loguru = ["loguru"]
+tests = ["blessings", "loguru", "loguru-mypy", "mypy-ipython", "pyright", "pytest", "pytest-asyncio", "pytest-cov", "pytest-mypy", "ruff", "sphinx", "types-setuptools"]
+
[[package]]
name = "python-xlib"
version = "0.33"
@@ -3313,163 +5517,355 @@ six = ">=1.10.0"
[[package]]
name = "pytz"
-version = "2024.1"
+version = "2024.2"
description = "World timezone definitions, modern and historical"
optional = false
python-versions = "*"
files = [
- {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
- {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
+ {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"},
+ {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"},
+]
+
+[[package]]
+name = "pyusb"
+version = "1.2.1"
+description = "Python USB access module"
+optional = true
+python-versions = ">=3.6.0"
+files = [
+ {file = "pyusb-1.2.1-py3-none-any.whl", hash = "sha256:2b4c7cb86dbadf044dfb9d3a4ff69fd217013dbe78a792177a3feb172449ea36"},
+ {file = "pyusb-1.2.1.tar.gz", hash = "sha256:a4cc7404a203144754164b8b40994e2849fde1cfff06b08492f12fff9d9de7b9"},
+]
+
+[[package]]
+name = "pywinpty"
+version = "2.0.13"
+description = "Pseudo terminal support for Windows from Python."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "pywinpty-2.0.13-cp310-none-win_amd64.whl", hash = "sha256:697bff211fb5a6508fee2dc6ff174ce03f34a9a233df9d8b5fe9c8ce4d5eaf56"},
+ {file = "pywinpty-2.0.13-cp311-none-win_amd64.whl", hash = "sha256:b96fb14698db1284db84ca38c79f15b4cfdc3172065b5137383910567591fa99"},
+ {file = "pywinpty-2.0.13-cp312-none-win_amd64.whl", hash = "sha256:2fd876b82ca750bb1333236ce98488c1be96b08f4f7647cfdf4129dfad83c2d4"},
+ {file = "pywinpty-2.0.13-cp38-none-win_amd64.whl", hash = "sha256:61d420c2116c0212808d31625611b51caf621fe67f8a6377e2e8b617ea1c1f7d"},
+ {file = "pywinpty-2.0.13-cp39-none-win_amd64.whl", hash = "sha256:71cb613a9ee24174730ac7ae439fd179ca34ccb8c5349e8d7b72ab5dea2c6f4b"},
+ {file = "pywinpty-2.0.13.tar.gz", hash = "sha256:c34e32351a3313ddd0d7da23d27f835c860d32fe4ac814d372a3ea9594f41dde"},
]
[[package]]
name = "pyyaml"
-version = "6.0.1"
+version = "6.0.2"
description = "YAML parser and emitter for Python"
optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
+files = [
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
+ {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
+ {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
+ {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
+ {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
+ {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
+ {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
+ {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
+ {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
+ {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
+ {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
+ {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
+ {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
+ {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
+ {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
+ {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
+ {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
+ {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
+ {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
+ {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
+ {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
+ {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
+ {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
+ {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
+ {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
+ {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
+ {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
+ {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
+ {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
+ {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
+ {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
+ {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
+ {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
+]
+
+[[package]]
+name = "pyzmq"
+version = "26.2.0"
+description = "Python bindings for 0MQ"
+optional = true
+python-versions = ">=3.7"
files = [
- {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
- {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
- {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
- {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
- {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
- {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
- {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
- {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
- {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
- {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
- {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
- {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
- {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
- {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
- {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
- {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
- {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
- {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
- {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
- {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
- {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
- {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
- {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
- {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
- {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
- {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
- {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
- {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
- {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
- {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
- {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
- {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
- {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
- {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
- {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
- {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
- {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
- {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
- {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
- {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
- {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
- {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
- {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
- {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
- {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
+ {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629"},
+ {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b"},
+ {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764"},
+ {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c"},
+ {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a"},
+ {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88"},
+ {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f"},
+ {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282"},
+ {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea"},
+ {file = "pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2"},
+ {file = "pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971"},
+ {file = "pyzmq-26.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa"},
+ {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218"},
+ {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4"},
+ {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef"},
+ {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317"},
+ {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf"},
+ {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e"},
+ {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37"},
+ {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3"},
+ {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6"},
+ {file = "pyzmq-26.2.0-cp311-cp311-win32.whl", hash = "sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4"},
+ {file = "pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5"},
+ {file = "pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003"},
+ {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9"},
+ {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52"},
+ {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08"},
+ {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5"},
+ {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae"},
+ {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711"},
+ {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6"},
+ {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3"},
+ {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b"},
+ {file = "pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7"},
+ {file = "pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a"},
+ {file = "pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b"},
+ {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726"},
+ {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3"},
+ {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50"},
+ {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb"},
+ {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187"},
+ {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b"},
+ {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18"},
+ {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115"},
+ {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e"},
+ {file = "pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5"},
+ {file = "pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad"},
+ {file = "pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797"},
+ {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a"},
+ {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc"},
+ {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5"},
+ {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672"},
+ {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797"},
+ {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386"},
+ {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306"},
+ {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6"},
+ {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0"},
+ {file = "pyzmq-26.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3b55a4229ce5da9497dd0452b914556ae58e96a4381bb6f59f1305dfd7e53fc8"},
+ {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9cb3a6460cdea8fe8194a76de8895707e61ded10ad0be97188cc8463ffa7e3a8"},
+ {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ab5cad923cc95c87bffee098a27856c859bd5d0af31bd346035aa816b081fe1"},
+ {file = "pyzmq-26.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ed69074a610fad1c2fda66180e7b2edd4d31c53f2d1872bc2d1211563904cd9"},
+ {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cccba051221b916a4f5e538997c45d7d136a5646442b1231b916d0164067ea27"},
+ {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0eaa83fc4c1e271c24eaf8fb083cbccef8fde77ec8cd45f3c35a9a123e6da097"},
+ {file = "pyzmq-26.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9edda2df81daa129b25a39b86cb57dfdfe16f7ec15b42b19bfac503360d27a93"},
+ {file = "pyzmq-26.2.0-cp37-cp37m-win32.whl", hash = "sha256:ea0eb6af8a17fa272f7b98d7bebfab7836a0d62738e16ba380f440fceca2d951"},
+ {file = "pyzmq-26.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4ff9dc6bc1664bb9eec25cd17506ef6672d506115095411e237d571e92a58231"},
+ {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:2eb7735ee73ca1b0d71e0e67c3739c689067f055c764f73aac4cc8ecf958ee3f"},
+ {file = "pyzmq-26.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a534f43bc738181aa7cbbaf48e3eca62c76453a40a746ab95d4b27b1111a7d2"},
+ {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:aedd5dd8692635813368e558a05266b995d3d020b23e49581ddd5bbe197a8ab6"},
+ {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8be4700cd8bb02cc454f630dcdf7cfa99de96788b80c51b60fe2fe1dac480289"},
+ {file = "pyzmq-26.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fcc03fa4997c447dce58264e93b5aa2d57714fbe0f06c07b7785ae131512732"},
+ {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:402b190912935d3db15b03e8f7485812db350d271b284ded2b80d2e5704be780"},
+ {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8685fa9c25ff00f550c1fec650430c4b71e4e48e8d852f7ddcf2e48308038640"},
+ {file = "pyzmq-26.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:76589c020680778f06b7e0b193f4b6dd66d470234a16e1df90329f5e14a171cd"},
+ {file = "pyzmq-26.2.0-cp38-cp38-win32.whl", hash = "sha256:8423c1877d72c041f2c263b1ec6e34360448decfb323fa8b94e85883043ef988"},
+ {file = "pyzmq-26.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:76589f2cd6b77b5bdea4fca5992dc1c23389d68b18ccc26a53680ba2dc80ff2f"},
+ {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:b1d464cb8d72bfc1a3adc53305a63a8e0cac6bc8c5a07e8ca190ab8d3faa43c2"},
+ {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4da04c48873a6abdd71811c5e163bd656ee1b957971db7f35140a2d573f6949c"},
+ {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d049df610ac811dcffdc147153b414147428567fbbc8be43bb8885f04db39d98"},
+ {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05590cdbc6b902101d0e65d6a4780af14dc22914cc6ab995d99b85af45362cc9"},
+ {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c811cfcd6a9bf680236c40c6f617187515269ab2912f3d7e8c0174898e2519db"},
+ {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6835dd60355593de10350394242b5757fbbd88b25287314316f266e24c61d073"},
+ {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc6bee759a6bddea5db78d7dcd609397449cb2d2d6587f48f3ca613b19410cfc"},
+ {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c530e1eecd036ecc83c3407f77bb86feb79916d4a33d11394b8234f3bd35b940"},
+ {file = "pyzmq-26.2.0-cp39-cp39-win32.whl", hash = "sha256:367b4f689786fca726ef7a6c5ba606958b145b9340a5e4808132cc65759abd44"},
+ {file = "pyzmq-26.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e6fa2e3e683f34aea77de8112f6483803c96a44fd726d7358b9888ae5bb394ec"},
+ {file = "pyzmq-26.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:7445be39143a8aa4faec43b076e06944b8f9d0701b669df4af200531b21e40bb"},
+ {file = "pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072"},
+ {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1"},
+ {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d"},
+ {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca"},
+ {file = "pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c"},
+ {file = "pyzmq-26.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2ea4ad4e6a12e454de05f2949d4beddb52460f3de7c8b9d5c46fbb7d7222e02c"},
+ {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fc4f7a173a5609631bb0c42c23d12c49df3966f89f496a51d3eb0ec81f4519d6"},
+ {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:878206a45202247781472a2d99df12a176fef806ca175799e1c6ad263510d57c"},
+ {file = "pyzmq-26.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17c412bad2eb9468e876f556eb4ee910e62d721d2c7a53c7fa31e643d35352e6"},
+ {file = "pyzmq-26.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0d987a3ae5a71c6226b203cfd298720e0086c7fe7c74f35fa8edddfbd6597eed"},
+ {file = "pyzmq-26.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:39887ac397ff35b7b775db7201095fc6310a35fdbae85bac4523f7eb3b840e20"},
+ {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:fdb5b3e311d4d4b0eb8b3e8b4d1b0a512713ad7e6a68791d0923d1aec433d919"},
+ {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:226af7dcb51fdb0109f0016449b357e182ea0ceb6b47dfb5999d569e5db161d5"},
+ {file = "pyzmq-26.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bed0e799e6120b9c32756203fb9dfe8ca2fb8467fed830c34c877e25638c3fc"},
+ {file = "pyzmq-26.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:29c7947c594e105cb9e6c466bace8532dc1ca02d498684128b339799f5248277"},
+ {file = "pyzmq-26.2.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cdeabcff45d1c219636ee2e54d852262e5c2e085d6cb476d938aee8d921356b3"},
+ {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35cffef589bcdc587d06f9149f8d5e9e8859920a071df5a2671de2213bef592a"},
+ {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18c8dc3b7468d8b4bdf60ce9d7141897da103c7a4690157b32b60acb45e333e6"},
+ {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7133d0a1677aec369d67dd78520d3fa96dd7f3dcec99d66c1762870e5ea1a50a"},
+ {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a96179a24b14fa6428cbfc08641c779a53f8fcec43644030328f44034c7f1f4"},
+ {file = "pyzmq-26.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4f78c88905461a9203eac9faac157a2a0dbba84a0fd09fd29315db27be40af9f"},
+ {file = "pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f"},
]
+[package.dependencies]
+cffi = {version = "*", markers = "implementation_name == \"pypy\""}
+
+[[package]]
+name = "referencing"
+version = "0.35.1"
+description = "JSON Referencing + Python"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"},
+ {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"},
+]
+
+[package.dependencies]
+attrs = ">=22.2.0"
+rpds-py = ">=0.7.0"
+
[[package]]
name = "regex"
-version = "2024.5.15"
+version = "2024.9.11"
description = "Alternative regular expression module, to replace re."
optional = false
python-versions = ">=3.8"
files = [
- {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f"},
- {file = "regex-2024.5.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6"},
- {file = "regex-2024.5.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0bd000c6e266927cb7a1bc39d55be95c4b4f65c5be53e659537537e019232b1"},
- {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5eaa7ddaf517aa095fa8da0b5015c44d03da83f5bd49c87961e3c997daed0de7"},
- {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba68168daedb2c0bab7fd7e00ced5ba90aebf91024dea3c88ad5063c2a562cca"},
- {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e8d717bca3a6e2064fc3a08df5cbe366369f4b052dcd21b7416e6d71620dca1"},
- {file = "regex-2024.5.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1337b7dbef9b2f71121cdbf1e97e40de33ff114801263b275aafd75303bd62b5"},
- {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f9ebd0a36102fcad2f03696e8af4ae682793a5d30b46c647eaf280d6cfb32796"},
- {file = "regex-2024.5.15-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9efa1a32ad3a3ea112224897cdaeb6aa00381627f567179c0314f7b65d354c62"},
- {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1595f2d10dff3d805e054ebdc41c124753631b6a471b976963c7b28543cf13b0"},
- {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b802512f3e1f480f41ab5f2cfc0e2f761f08a1f41092d6718868082fc0d27143"},
- {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:a0981022dccabca811e8171f913de05720590c915b033b7e601f35ce4ea7019f"},
- {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:19068a6a79cf99a19ccefa44610491e9ca02c2be3305c7760d3831d38a467a6f"},
- {file = "regex-2024.5.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:1b5269484f6126eee5e687785e83c6b60aad7663dafe842b34691157e5083e53"},
- {file = "regex-2024.5.15-cp310-cp310-win32.whl", hash = "sha256:ada150c5adfa8fbcbf321c30c751dc67d2f12f15bd183ffe4ec7cde351d945b3"},
- {file = "regex-2024.5.15-cp310-cp310-win_amd64.whl", hash = "sha256:ac394ff680fc46b97487941f5e6ae49a9f30ea41c6c6804832063f14b2a5a145"},
- {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f5b1dff3ad008dccf18e652283f5e5339d70bf8ba7c98bf848ac33db10f7bc7a"},
- {file = "regex-2024.5.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6a2b494a76983df8e3d3feea9b9ffdd558b247e60b92f877f93a1ff43d26656"},
- {file = "regex-2024.5.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a32b96f15c8ab2e7d27655969a23895eb799de3665fa94349f3b2fbfd547236f"},
- {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10002e86e6068d9e1c91eae8295ef690f02f913c57db120b58fdd35a6bb1af35"},
- {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec54d5afa89c19c6dd8541a133be51ee1017a38b412b1321ccb8d6ddbeb4cf7d"},
- {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10e4ce0dca9ae7a66e6089bb29355d4432caed736acae36fef0fdd7879f0b0cb"},
- {file = "regex-2024.5.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e507ff1e74373c4d3038195fdd2af30d297b4f0950eeda6f515ae3d84a1770f"},
- {file = "regex-2024.5.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1f059a4d795e646e1c37665b9d06062c62d0e8cc3c511fe01315973a6542e40"},
- {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0721931ad5fe0dda45d07f9820b90b2148ccdd8e45bb9e9b42a146cb4f695649"},
- {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:833616ddc75ad595dee848ad984d067f2f31be645d603e4d158bba656bbf516c"},
- {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:287eb7f54fc81546346207c533ad3c2c51a8d61075127d7f6d79aaf96cdee890"},
- {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:19dfb1c504781a136a80ecd1fff9f16dddf5bb43cec6871778c8a907a085bb3d"},
- {file = "regex-2024.5.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:119af6e56dce35e8dfb5222573b50c89e5508d94d55713c75126b753f834de68"},
- {file = "regex-2024.5.15-cp311-cp311-win32.whl", hash = "sha256:1c1c174d6ec38d6c8a7504087358ce9213d4332f6293a94fbf5249992ba54efa"},
- {file = "regex-2024.5.15-cp311-cp311-win_amd64.whl", hash = "sha256:9e717956dcfd656f5055cc70996ee2cc82ac5149517fc8e1b60261b907740201"},
- {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:632b01153e5248c134007209b5c6348a544ce96c46005d8456de1d552455b014"},
- {file = "regex-2024.5.15-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e64198f6b856d48192bf921421fdd8ad8eb35e179086e99e99f711957ffedd6e"},
- {file = "regex-2024.5.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68811ab14087b2f6e0fc0c2bae9ad689ea3584cad6917fc57be6a48bbd012c49"},
- {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ec0c2fea1e886a19c3bee0cd19d862b3aa75dcdfb42ebe8ed30708df64687a"},
- {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0c0c0003c10f54a591d220997dd27d953cd9ccc1a7294b40a4be5312be8797b"},
- {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2431b9e263af1953c55abbd3e2efca67ca80a3de8a0437cb58e2421f8184717a"},
- {file = "regex-2024.5.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a605586358893b483976cffc1723fb0f83e526e8f14c6e6614e75919d9862cf"},
- {file = "regex-2024.5.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:391d7f7f1e409d192dba8bcd42d3e4cf9e598f3979cdaed6ab11288da88cb9f2"},
- {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9ff11639a8d98969c863d4617595eb5425fd12f7c5ef6621a4b74b71ed8726d5"},
- {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4eee78a04e6c67e8391edd4dad3279828dd66ac4b79570ec998e2155d2e59fd5"},
- {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8fe45aa3f4aa57faabbc9cb46a93363edd6197cbc43523daea044e9ff2fea83e"},
- {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:d0a3d8d6acf0c78a1fff0e210d224b821081330b8524e3e2bc5a68ef6ab5803d"},
- {file = "regex-2024.5.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c486b4106066d502495b3025a0a7251bf37ea9540433940a23419461ab9f2a80"},
- {file = "regex-2024.5.15-cp312-cp312-win32.whl", hash = "sha256:c49e15eac7c149f3670b3e27f1f28a2c1ddeccd3a2812cba953e01be2ab9b5fe"},
- {file = "regex-2024.5.15-cp312-cp312-win_amd64.whl", hash = "sha256:673b5a6da4557b975c6c90198588181029c60793835ce02f497ea817ff647cb2"},
- {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:87e2a9c29e672fc65523fb47a90d429b70ef72b901b4e4b1bd42387caf0d6835"},
- {file = "regex-2024.5.15-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c3bea0ba8b73b71b37ac833a7f3fd53825924165da6a924aec78c13032f20850"},
- {file = "regex-2024.5.15-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfc4f82cabe54f1e7f206fd3d30fda143f84a63fe7d64a81558d6e5f2e5aaba9"},
- {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5bb9425fe881d578aeca0b2b4b3d314ec88738706f66f219c194d67179337cb"},
- {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64c65783e96e563103d641760664125e91bd85d8e49566ee560ded4da0d3e704"},
- {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf2430df4148b08fb4324b848672514b1385ae3807651f3567871f130a728cc3"},
- {file = "regex-2024.5.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5397de3219a8b08ae9540c48f602996aa6b0b65d5a61683e233af8605c42b0f2"},
- {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:455705d34b4154a80ead722f4f185b04c4237e8e8e33f265cd0798d0e44825fa"},
- {file = "regex-2024.5.15-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2b6f1b3bb6f640c1a92be3bbfbcb18657b125b99ecf141fb3310b5282c7d4ed"},
- {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3ad070b823ca5890cab606c940522d05d3d22395d432f4aaaf9d5b1653e47ced"},
- {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5b5467acbfc153847d5adb21e21e29847bcb5870e65c94c9206d20eb4e99a384"},
- {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e6662686aeb633ad65be2a42b4cb00178b3fbf7b91878f9446075c404ada552f"},
- {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:2b4c884767504c0e2401babe8b5b7aea9148680d2e157fa28f01529d1f7fcf67"},
- {file = "regex-2024.5.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3cd7874d57f13bf70078f1ff02b8b0aa48d5b9ed25fc48547516c6aba36f5741"},
- {file = "regex-2024.5.15-cp38-cp38-win32.whl", hash = "sha256:e4682f5ba31f475d58884045c1a97a860a007d44938c4c0895f41d64481edbc9"},
- {file = "regex-2024.5.15-cp38-cp38-win_amd64.whl", hash = "sha256:d99ceffa25ac45d150e30bd9ed14ec6039f2aad0ffa6bb87a5936f5782fc1569"},
- {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:13cdaf31bed30a1e1c2453ef6015aa0983e1366fad2667657dbcac7b02f67133"},
- {file = "regex-2024.5.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cac27dcaa821ca271855a32188aa61d12decb6fe45ffe3e722401fe61e323cd1"},
- {file = "regex-2024.5.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7dbe2467273b875ea2de38ded4eba86cbcbc9a1a6d0aa11dcf7bd2e67859c435"},
- {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64f18a9a3513a99c4bef0e3efd4c4a5b11228b48aa80743be822b71e132ae4f5"},
- {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d347a741ea871c2e278fde6c48f85136c96b8659b632fb57a7d1ce1872547600"},
- {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1878b8301ed011704aea4c806a3cadbd76f84dece1ec09cc9e4dc934cfa5d4da"},
- {file = "regex-2024.5.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4babf07ad476aaf7830d77000874d7611704a7fcf68c9c2ad151f5d94ae4bfc4"},
- {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35cb514e137cb3488bce23352af3e12fb0dbedd1ee6e60da053c69fb1b29cc6c"},
- {file = "regex-2024.5.15-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cdd09d47c0b2efee9378679f8510ee6955d329424c659ab3c5e3a6edea696294"},
- {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:72d7a99cd6b8f958e85fc6ca5b37c4303294954eac1376535b03c2a43eb72629"},
- {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a094801d379ab20c2135529948cb84d417a2169b9bdceda2a36f5f10977ebc16"},
- {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c0c18345010870e58238790a6779a1219b4d97bd2e77e1140e8ee5d14df071aa"},
- {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:16093f563098448ff6b1fa68170e4acbef94e6b6a4e25e10eae8598bb1694b5d"},
- {file = "regex-2024.5.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:e38a7d4e8f633a33b4c7350fbd8bad3b70bf81439ac67ac38916c4a86b465456"},
- {file = "regex-2024.5.15-cp39-cp39-win32.whl", hash = "sha256:71a455a3c584a88f654b64feccc1e25876066c4f5ef26cd6dd711308aa538694"},
- {file = "regex-2024.5.15-cp39-cp39-win_amd64.whl", hash = "sha256:cab12877a9bdafde5500206d1020a584355a97884dfd388af3699e9137bf7388"},
- {file = "regex-2024.5.15.tar.gz", hash = "sha256:d3ee02d9e5f482cc8309134a91eeaacbdd2261ba111b0fef3748eeb4913e6a2c"},
+ {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1494fa8725c285a81d01dc8c06b55287a1ee5e0e382d8413adc0a9197aac6408"},
+ {file = "regex-2024.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0e12c481ad92d129c78f13a2a3662317e46ee7ef96c94fd332e1c29131875b7d"},
+ {file = "regex-2024.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:16e13a7929791ac1216afde26f712802e3df7bf0360b32e4914dca3ab8baeea5"},
+ {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46989629904bad940bbec2106528140a218b4a36bb3042d8406980be1941429c"},
+ {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a906ed5e47a0ce5f04b2c981af1c9acf9e8696066900bf03b9d7879a6f679fc8"},
+ {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a091b0550b3b0207784a7d6d0f1a00d1d1c8a11699c1a4d93db3fbefc3ad35"},
+ {file = "regex-2024.9.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ddcd9a179c0a6fa8add279a4444015acddcd7f232a49071ae57fa6e278f1f71"},
+ {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b41e1adc61fa347662b09398e31ad446afadff932a24807d3ceb955ed865cc8"},
+ {file = "regex-2024.9.11-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ced479f601cd2f8ca1fd7b23925a7e0ad512a56d6e9476f79b8f381d9d37090a"},
+ {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:635a1d96665f84b292e401c3d62775851aedc31d4f8784117b3c68c4fcd4118d"},
+ {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c0256beda696edcf7d97ef16b2a33a8e5a875affd6fa6567b54f7c577b30a137"},
+ {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ce4f1185db3fbde8ed8aa223fc9620f276c58de8b0d4f8cc86fd1360829edb6"},
+ {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:09d77559e80dcc9d24570da3745ab859a9cf91953062e4ab126ba9d5993688ca"},
+ {file = "regex-2024.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a22ccefd4db3f12b526eccb129390942fe874a3a9fdbdd24cf55773a1faab1a"},
+ {file = "regex-2024.9.11-cp310-cp310-win32.whl", hash = "sha256:f745ec09bc1b0bd15cfc73df6fa4f726dcc26bb16c23a03f9e3367d357eeedd0"},
+ {file = "regex-2024.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:01c2acb51f8a7d6494c8c5eafe3d8e06d76563d8a8a4643b37e9b2dd8a2ff623"},
+ {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2cce2449e5927a0bf084d346da6cd5eb016b2beca10d0013ab50e3c226ffc0df"},
+ {file = "regex-2024.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b37fa423beefa44919e009745ccbf353d8c981516e807995b2bd11c2c77d268"},
+ {file = "regex-2024.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64ce2799bd75039b480cc0360907c4fb2f50022f030bf9e7a8705b636e408fad"},
+ {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4cc92bb6db56ab0c1cbd17294e14f5e9224f0cc6521167ef388332604e92679"},
+ {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d05ac6fa06959c4172eccd99a222e1fbf17b5670c4d596cb1e5cde99600674c4"},
+ {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:040562757795eeea356394a7fb13076ad4f99d3c62ab0f8bdfb21f99a1f85664"},
+ {file = "regex-2024.9.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6113c008a7780792efc80f9dfe10ba0cd043cbf8dc9a76ef757850f51b4edc50"},
+ {file = "regex-2024.9.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e5fb5f77c8745a60105403a774fe2c1759b71d3e7b4ca237a5e67ad066c7199"},
+ {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:54d9ff35d4515debf14bc27f1e3b38bfc453eff3220f5bce159642fa762fe5d4"},
+ {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:df5cbb1fbc74a8305b6065d4ade43b993be03dbe0f8b30032cced0d7740994bd"},
+ {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7fb89ee5d106e4a7a51bce305ac4efb981536301895f7bdcf93ec92ae0d91c7f"},
+ {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a738b937d512b30bf75995c0159c0ddf9eec0775c9d72ac0202076c72f24aa96"},
+ {file = "regex-2024.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e28f9faeb14b6f23ac55bfbbfd3643f5c7c18ede093977f1df249f73fd22c7b1"},
+ {file = "regex-2024.9.11-cp311-cp311-win32.whl", hash = "sha256:18e707ce6c92d7282dfce370cd205098384b8ee21544e7cb29b8aab955b66fa9"},
+ {file = "regex-2024.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:313ea15e5ff2a8cbbad96ccef6be638393041b0a7863183c2d31e0c6116688cf"},
+ {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b0d0a6c64fcc4ef9c69bd5b3b3626cc3776520a1637d8abaa62b9edc147a58f7"},
+ {file = "regex-2024.9.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:49b0e06786ea663f933f3710a51e9385ce0cba0ea56b67107fd841a55d56a231"},
+ {file = "regex-2024.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5b513b6997a0b2f10e4fd3a1313568e373926e8c252bd76c960f96fd039cd28d"},
+ {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee439691d8c23e76f9802c42a95cfeebf9d47cf4ffd06f18489122dbb0a7ad64"},
+ {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a8f877c89719d759e52783f7fe6e1c67121076b87b40542966c02de5503ace42"},
+ {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23b30c62d0f16827f2ae9f2bb87619bc4fba2044911e2e6c2eb1af0161cdb766"},
+ {file = "regex-2024.9.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85ab7824093d8f10d44330fe1e6493f756f252d145323dd17ab6b48733ff6c0a"},
+ {file = "regex-2024.9.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8dee5b4810a89447151999428fe096977346cf2f29f4d5e29609d2e19e0199c9"},
+ {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:98eeee2f2e63edae2181c886d7911ce502e1292794f4c5ee71e60e23e8d26b5d"},
+ {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:57fdd2e0b2694ce6fc2e5ccf189789c3e2962916fb38779d3e3521ff8fe7a822"},
+ {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:d552c78411f60b1fdaafd117a1fca2f02e562e309223b9d44b7de8be451ec5e0"},
+ {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:a0b2b80321c2ed3fcf0385ec9e51a12253c50f146fddb2abbb10f033fe3d049a"},
+ {file = "regex-2024.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:18406efb2f5a0e57e3a5881cd9354c1512d3bb4f5c45d96d110a66114d84d23a"},
+ {file = "regex-2024.9.11-cp312-cp312-win32.whl", hash = "sha256:e464b467f1588e2c42d26814231edecbcfe77f5ac414d92cbf4e7b55b2c2a776"},
+ {file = "regex-2024.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:9e8719792ca63c6b8340380352c24dcb8cd7ec49dae36e963742a275dfae6009"},
+ {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c157bb447303070f256e084668b702073db99bbb61d44f85d811025fcf38f784"},
+ {file = "regex-2024.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4db21ece84dfeefc5d8a3863f101995de646c6cb0536952c321a2650aa202c36"},
+ {file = "regex-2024.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:220e92a30b426daf23bb67a7962900ed4613589bab80382be09b48896d211e92"},
+ {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb1ae19e64c14c7ec1995f40bd932448713d3c73509e82d8cd7744dc00e29e86"},
+ {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f47cd43a5bfa48f86925fe26fbdd0a488ff15b62468abb5d2a1e092a4fb10e85"},
+ {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d4a76b96f398697fe01117093613166e6aa8195d63f1b4ec3f21ab637632963"},
+ {file = "regex-2024.9.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ea51dcc0835eea2ea31d66456210a4e01a076d820e9039b04ae8d17ac11dee6"},
+ {file = "regex-2024.9.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7aaa315101c6567a9a45d2839322c51c8d6e81f67683d529512f5bcfb99c802"},
+ {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c57d08ad67aba97af57a7263c2d9006d5c404d721c5f7542f077f109ec2a4a29"},
+ {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f8404bf61298bb6f8224bb9176c1424548ee1181130818fcd2cbffddc768bed8"},
+ {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dd4490a33eb909ef5078ab20f5f000087afa2a4daa27b4c072ccb3cb3050ad84"},
+ {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:eee9130eaad130649fd73e5cd92f60e55708952260ede70da64de420cdcad554"},
+ {file = "regex-2024.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6a2644a93da36c784e546de579ec1806bfd2763ef47babc1b03d765fe560c9f8"},
+ {file = "regex-2024.9.11-cp313-cp313-win32.whl", hash = "sha256:e997fd30430c57138adc06bba4c7c2968fb13d101e57dd5bb9355bf8ce3fa7e8"},
+ {file = "regex-2024.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:042c55879cfeb21a8adacc84ea347721d3d83a159da6acdf1116859e2427c43f"},
+ {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:35f4a6f96aa6cb3f2f7247027b07b15a374f0d5b912c0001418d1d55024d5cb4"},
+ {file = "regex-2024.9.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:55b96e7ce3a69a8449a66984c268062fbaa0d8ae437b285428e12797baefce7e"},
+ {file = "regex-2024.9.11-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cb130fccd1a37ed894824b8c046321540263013da72745d755f2d35114b81a60"},
+ {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:323c1f04be6b2968944d730e5c2091c8c89767903ecaa135203eec4565ed2b2b"},
+ {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:be1c8ed48c4c4065ecb19d882a0ce1afe0745dfad8ce48c49586b90a55f02366"},
+ {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b5b029322e6e7b94fff16cd120ab35a253236a5f99a79fb04fda7ae71ca20ae8"},
+ {file = "regex-2024.9.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6fff13ef6b5f29221d6904aa816c34701462956aa72a77f1f151a8ec4f56aeb"},
+ {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:587d4af3979376652010e400accc30404e6c16b7df574048ab1f581af82065e4"},
+ {file = "regex-2024.9.11-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:079400a8269544b955ffa9e31f186f01d96829110a3bf79dc338e9910f794fca"},
+ {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f9268774428ec173654985ce55fc6caf4c6d11ade0f6f914d48ef4719eb05ebb"},
+ {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:23f9985c8784e544d53fc2930fc1ac1a7319f5d5332d228437acc9f418f2f168"},
+ {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ae2941333154baff9838e88aa71c1d84f4438189ecc6021a12c7573728b5838e"},
+ {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e93f1c331ca8e86fe877a48ad64e77882c0c4da0097f2212873a69bbfea95d0c"},
+ {file = "regex-2024.9.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:846bc79ee753acf93aef4184c040d709940c9d001029ceb7b7a52747b80ed2dd"},
+ {file = "regex-2024.9.11-cp38-cp38-win32.whl", hash = "sha256:c94bb0a9f1db10a1d16c00880bdebd5f9faf267273b8f5bd1878126e0fbde771"},
+ {file = "regex-2024.9.11-cp38-cp38-win_amd64.whl", hash = "sha256:2b08fce89fbd45664d3df6ad93e554b6c16933ffa9d55cb7e01182baaf971508"},
+ {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:07f45f287469039ffc2c53caf6803cd506eb5f5f637f1d4acb37a738f71dd066"},
+ {file = "regex-2024.9.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4838e24ee015101d9f901988001038f7f0d90dc0c3b115541a1365fb439add62"},
+ {file = "regex-2024.9.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6edd623bae6a737f10ce853ea076f56f507fd7726bee96a41ee3d68d347e4d16"},
+ {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c69ada171c2d0e97a4b5aa78fbb835e0ffbb6b13fc5da968c09811346564f0d3"},
+ {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02087ea0a03b4af1ed6ebab2c54d7118127fee8d71b26398e8e4b05b78963199"},
+ {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:69dee6a020693d12a3cf892aba4808fe168d2a4cef368eb9bf74f5398bfd4ee8"},
+ {file = "regex-2024.9.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:297f54910247508e6e5cae669f2bc308985c60540a4edd1c77203ef19bfa63ca"},
+ {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ecea58b43a67b1b79805f1a0255730edaf5191ecef84dbc4cc85eb30bc8b63b9"},
+ {file = "regex-2024.9.11-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eab4bb380f15e189d1313195b062a6aa908f5bd687a0ceccd47c8211e9cf0d4a"},
+ {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0cbff728659ce4bbf4c30b2a1be040faafaa9eca6ecde40aaff86f7889f4ab39"},
+ {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:54c4a097b8bc5bb0dfc83ae498061d53ad7b5762e00f4adaa23bee22b012e6ba"},
+ {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:73d6d2f64f4d894c96626a75578b0bf7d9e56dcda8c3d037a2118fdfe9b1c664"},
+ {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:e53b5fbab5d675aec9f0c501274c467c0f9a5d23696cfc94247e1fb56501ed89"},
+ {file = "regex-2024.9.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0ffbcf9221e04502fc35e54d1ce9567541979c3fdfb93d2c554f0ca583a19b35"},
+ {file = "regex-2024.9.11-cp39-cp39-win32.whl", hash = "sha256:e4c22e1ac1f1ec1e09f72e6c44d8f2244173db7eb9629cc3a346a8d7ccc31142"},
+ {file = "regex-2024.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:faa3c142464efec496967359ca99696c896c591c56c53506bac1ad465f66e919"},
+ {file = "regex-2024.9.11.tar.gz", hash = "sha256:6c188c307e8433bcb63dc1915022deb553b4203a70722fc542c363bf120a01fd"},
+]
+
+[[package]]
+name = "renamed-opencv-python-inference-engine"
+version = "2022.1.5"
+description = "Wrapper package for OpenCV with Inference Engine python bindings, but compiled under another namespace to prevent conflicts with the default OpenCV python packages"
+optional = true
+python-versions = "*"
+files = [
+ {file = "renamed_opencv_python_inference_engine-2022.1.5-py3-none-manylinux1_x86_64.whl", hash = "sha256:c92666acfd75f8b29b9f1aa566d4ad3851387fcea3992f113f72adf449477523"},
]
+[package.dependencies]
+numpy = "*"
+
[[package]]
name = "requests"
version = "2.32.3"
@@ -3494,16 +5890,16 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "rerun-sdk"
-version = "0.17.0"
+version = "0.18.2"
description = "The Rerun Logging SDK"
optional = false
python-versions = "<3.13,>=3.8"
files = [
- {file = "rerun_sdk-0.17.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:abd34f746eada83b8bb0bc50007183151981d7ccf18306f3d42165819a3f6fcb"},
- {file = "rerun_sdk-0.17.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:8b0a8a6feab3f8e679801d158216a71d88a81480021587719330f50d083c4d26"},
- {file = "rerun_sdk-0.17.0-cp38-abi3-manylinux_2_31_aarch64.whl", hash = "sha256:ad55807abafb01e527846742e087819aac8e103f1ec15aadc563a4038bb44e1d"},
- {file = "rerun_sdk-0.17.0-cp38-abi3-manylinux_2_31_x86_64.whl", hash = "sha256:9d41f1f475270b1e0d50ddb8cb62e0d828988f0c371ac8457af25c8be5aa1dc0"},
- {file = "rerun_sdk-0.17.0-cp38-abi3-win_amd64.whl", hash = "sha256:34e5595a326cbdddfebdf00b08e877358c564fce74cc8c6d617fc89ef3a6aa70"},
+ {file = "rerun_sdk-0.18.2-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:bc4e73275f428e4e9feb8e85f88db7a9fd18b997b1570de62f949a926978f1b2"},
+ {file = "rerun_sdk-0.18.2-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:efbba40a59710ae83607cb0dc140398a35979c2d2acf5190c9def2ac4697f6a8"},
+ {file = "rerun_sdk-0.18.2-cp38-abi3-manylinux_2_31_aarch64.whl", hash = "sha256:2a5e3b618b6d1bfde09bd5614a898995f3c318cc69d8f6d569924a2cd41536ce"},
+ {file = "rerun_sdk-0.18.2-cp38-abi3-manylinux_2_31_x86_64.whl", hash = "sha256:8fdfc4c51ef2e75cb68d39e56f0d7c196eff250cb9a0260c07d5e2d6736e31b0"},
+ {file = "rerun_sdk-0.18.2-cp38-abi3-win_amd64.whl", hash = "sha256:c929ade91d3be301b26671b25e70fb529524ced915523d266641c6fc667a1eb5"},
]
[package.dependencies]
@@ -3514,116 +5910,295 @@ pyarrow = ">=14.0.2"
typing-extensions = ">=4.5"
[package.extras]
-notebook = ["rerun-notebook (==0.17.0)"]
+notebook = ["rerun-notebook (==0.18.2)"]
tests = ["pytest (==7.1.2)"]
+[[package]]
+name = "rfc3339-validator"
+version = "0.1.4"
+description = "A pure python RFC3339 validator"
+optional = true
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"},
+ {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"},
+]
+
+[package.dependencies]
+six = "*"
+
+[[package]]
+name = "rfc3986-validator"
+version = "0.1.1"
+description = "Pure python rfc3986 validator"
+optional = true
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
+files = [
+ {file = "rfc3986_validator-0.1.1-py2.py3-none-any.whl", hash = "sha256:2f235c432ef459970b4306369336b9d5dbdda31b510ca1e327636e01f528bfa9"},
+ {file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"},
+]
+
+[[package]]
+name = "rich"
+version = "13.9.2"
+description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
+optional = true
+python-versions = ">=3.8.0"
+files = [
+ {file = "rich-13.9.2-py3-none-any.whl", hash = "sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1"},
+ {file = "rich-13.9.2.tar.gz", hash = "sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c"},
+]
+
+[package.dependencies]
+markdown-it-py = ">=2.2.0"
+pygments = ">=2.13.0,<3.0.0"
+typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""}
+
+[package.extras]
+jupyter = ["ipywidgets (>=7.5.1,<9)"]
+
+[[package]]
+name = "rpds-py"
+version = "0.20.0"
+description = "Python bindings to Rust's persistent data structures (rpds)"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "rpds_py-0.20.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3ad0fda1635f8439cde85c700f964b23ed5fc2d28016b32b9ee5fe30da5c84e2"},
+ {file = "rpds_py-0.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9bb4a0d90fdb03437c109a17eade42dfbf6190408f29b2744114d11586611d6f"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6377e647bbfd0a0b159fe557f2c6c602c159fc752fa316572f012fc0bf67150"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb851b7df9dda52dc1415ebee12362047ce771fc36914586b2e9fcbd7d293b3e"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e0f80b739e5a8f54837be5d5c924483996b603d5502bfff79bf33da06164ee2"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a8c94dad2e45324fc74dce25e1645d4d14df9a4e54a30fa0ae8bad9a63928e3"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8e604fe73ba048c06085beaf51147eaec7df856824bfe7b98657cf436623daf"},
+ {file = "rpds_py-0.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:df3de6b7726b52966edf29663e57306b23ef775faf0ac01a3e9f4012a24a4140"},
+ {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cf258ede5bc22a45c8e726b29835b9303c285ab46fc7c3a4cc770736b5304c9f"},
+ {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:55fea87029cded5df854ca7e192ec7bdb7ecd1d9a3f63d5c4eb09148acf4a7ce"},
+ {file = "rpds_py-0.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ae94bd0b2f02c28e199e9bc51485d0c5601f58780636185660f86bf80c89af94"},
+ {file = "rpds_py-0.20.0-cp310-none-win32.whl", hash = "sha256:28527c685f237c05445efec62426d285e47a58fb05ba0090a4340b73ecda6dee"},
+ {file = "rpds_py-0.20.0-cp310-none-win_amd64.whl", hash = "sha256:238a2d5b1cad28cdc6ed15faf93a998336eb041c4e440dd7f902528b8891b399"},
+ {file = "rpds_py-0.20.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac2f4f7a98934c2ed6505aead07b979e6f999389f16b714448fb39bbaa86a489"},
+ {file = "rpds_py-0.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:220002c1b846db9afd83371d08d239fdc865e8f8c5795bbaec20916a76db3318"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d7919548df3f25374a1f5d01fbcd38dacab338ef5f33e044744b5c36729c8db"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:758406267907b3781beee0f0edfe4a179fbd97c0be2e9b1154d7f0a1279cf8e5"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d61339e9f84a3f0767b1995adfb171a0d00a1185192718a17af6e124728e0f5"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1259c7b3705ac0a0bd38197565a5d603218591d3f6cee6e614e380b6ba61c6f6"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c1dc0f53856b9cc9a0ccca0a7cc61d3d20a7088201c0937f3f4048c1718a209"},
+ {file = "rpds_py-0.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7e60cb630f674a31f0368ed32b2a6b4331b8350d67de53c0359992444b116dd3"},
+ {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbe982f38565bb50cb7fb061ebf762c2f254ca3d8c20d4006878766e84266272"},
+ {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:514b3293b64187172bc77c8fb0cdae26981618021053b30d8371c3a902d4d5ad"},
+ {file = "rpds_py-0.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d0a26ffe9d4dd35e4dfdd1e71f46401cff0181c75ac174711ccff0459135fa58"},
+ {file = "rpds_py-0.20.0-cp311-none-win32.whl", hash = "sha256:89c19a494bf3ad08c1da49445cc5d13d8fefc265f48ee7e7556839acdacf69d0"},
+ {file = "rpds_py-0.20.0-cp311-none-win_amd64.whl", hash = "sha256:c638144ce971df84650d3ed0096e2ae7af8e62ecbbb7b201c8935c370df00a2c"},
+ {file = "rpds_py-0.20.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a84ab91cbe7aab97f7446652d0ed37d35b68a465aeef8fc41932a9d7eee2c1a6"},
+ {file = "rpds_py-0.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:56e27147a5a4c2c21633ff8475d185734c0e4befd1c989b5b95a5d0db699b21b"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2580b0c34583b85efec8c5c5ec9edf2dfe817330cc882ee972ae650e7b5ef739"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b80d4a7900cf6b66bb9cee5c352b2d708e29e5a37fe9bf784fa97fc11504bf6c"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50eccbf054e62a7b2209b28dc7a22d6254860209d6753e6b78cfaeb0075d7bee"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:49a8063ea4296b3a7e81a5dfb8f7b2d73f0b1c20c2af401fb0cdf22e14711a96"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea438162a9fcbee3ecf36c23e6c68237479f89f962f82dae83dc15feeceb37e4"},
+ {file = "rpds_py-0.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:18d7585c463087bddcfa74c2ba267339f14f2515158ac4db30b1f9cbdb62c8ef"},
+ {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d4c7d1a051eeb39f5c9547e82ea27cbcc28338482242e3e0b7768033cb083821"},
+ {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4df1e3b3bec320790f699890d41c59d250f6beda159ea3c44c3f5bac1976940"},
+ {file = "rpds_py-0.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2cf126d33a91ee6eedc7f3197b53e87a2acdac63602c0f03a02dd69e4b138174"},
+ {file = "rpds_py-0.20.0-cp312-none-win32.whl", hash = "sha256:8bc7690f7caee50b04a79bf017a8d020c1f48c2a1077ffe172abec59870f1139"},
+ {file = "rpds_py-0.20.0-cp312-none-win_amd64.whl", hash = "sha256:0e13e6952ef264c40587d510ad676a988df19adea20444c2b295e536457bc585"},
+ {file = "rpds_py-0.20.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:aa9a0521aeca7d4941499a73ad7d4f8ffa3d1affc50b9ea11d992cd7eff18a29"},
+ {file = "rpds_py-0.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1f1d51eccb7e6c32ae89243cb352389228ea62f89cd80823ea7dd1b98e0b91"},
+ {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a86a9b96070674fc88b6f9f71a97d2c1d3e5165574615d1f9168ecba4cecb24"},
+ {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c8ef2ebf76df43f5750b46851ed1cdf8f109d7787ca40035fe19fbdc1acc5a7"},
+ {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b25f024b421d5859d156750ea9a65651793d51b76a2e9238c05c9d5f203a9"},
+ {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57eb94a8c16ab08fef6404301c38318e2c5a32216bf5de453e2714c964c125c8"},
+ {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1940dae14e715e2e02dfd5b0f64a52e8374a517a1e531ad9412319dc3ac7879"},
+ {file = "rpds_py-0.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d20277fd62e1b992a50c43f13fbe13277a31f8c9f70d59759c88f644d66c619f"},
+ {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:06db23d43f26478303e954c34c75182356ca9aa7797d22c5345b16871ab9c45c"},
+ {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2a5db5397d82fa847e4c624b0c98fe59d2d9b7cf0ce6de09e4d2e80f8f5b3f2"},
+ {file = "rpds_py-0.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a35df9f5548fd79cb2f52d27182108c3e6641a4feb0f39067911bf2adaa3e57"},
+ {file = "rpds_py-0.20.0-cp313-none-win32.whl", hash = "sha256:fd2d84f40633bc475ef2d5490b9c19543fbf18596dcb1b291e3a12ea5d722f7a"},
+ {file = "rpds_py-0.20.0-cp313-none-win_amd64.whl", hash = "sha256:9bc2d153989e3216b0559251b0c260cfd168ec78b1fac33dd485750a228db5a2"},
+ {file = "rpds_py-0.20.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:f2fbf7db2012d4876fb0d66b5b9ba6591197b0f165db8d99371d976546472a24"},
+ {file = "rpds_py-0.20.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1e5f3cd7397c8f86c8cc72d5a791071431c108edd79872cdd96e00abd8497d29"},
+ {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce9845054c13696f7af7f2b353e6b4f676dab1b4b215d7fe5e05c6f8bb06f965"},
+ {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c3e130fd0ec56cb76eb49ef52faead8ff09d13f4527e9b0c400307ff72b408e1"},
+ {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b16aa0107ecb512b568244ef461f27697164d9a68d8b35090e9b0c1c8b27752"},
+ {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aa7f429242aae2947246587d2964fad750b79e8c233a2367f71b554e9447949c"},
+ {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0fc424a5842a11e28956e69395fbbeab2c97c42253169d87e90aac2886d751"},
+ {file = "rpds_py-0.20.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c00a3b1e70c1d3891f0db1b05292747f0dbcfb49c43f9244d04c70fbc40eb8"},
+ {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:40ce74fc86ee4645d0a225498d091d8bc61f39b709ebef8204cb8b5a464d3c0e"},
+ {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4fe84294c7019456e56d93e8ababdad5a329cd25975be749c3f5f558abb48253"},
+ {file = "rpds_py-0.20.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:338ca4539aad4ce70a656e5187a3a31c5204f261aef9f6ab50e50bcdffaf050a"},
+ {file = "rpds_py-0.20.0-cp38-none-win32.whl", hash = "sha256:54b43a2b07db18314669092bb2de584524d1ef414588780261e31e85846c26a5"},
+ {file = "rpds_py-0.20.0-cp38-none-win_amd64.whl", hash = "sha256:a1862d2d7ce1674cffa6d186d53ca95c6e17ed2b06b3f4c476173565c862d232"},
+ {file = "rpds_py-0.20.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3fde368e9140312b6e8b6c09fb9f8c8c2f00999d1823403ae90cc00480221b22"},
+ {file = "rpds_py-0.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9824fb430c9cf9af743cf7aaf6707bf14323fb51ee74425c380f4c846ea70789"},
+ {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11ef6ce74616342888b69878d45e9f779b95d4bd48b382a229fe624a409b72c5"},
+ {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c52d3f2f82b763a24ef52f5d24358553e8403ce05f893b5347098014f2d9eff2"},
+ {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d35cef91e59ebbeaa45214861874bc6f19eb35de96db73e467a8358d701a96c"},
+ {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72278a30111e5b5525c1dd96120d9e958464316f55adb030433ea905866f4de"},
+ {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4c29cbbba378759ac5786730d1c3cb4ec6f8ababf5c42a9ce303dc4b3d08cda"},
+ {file = "rpds_py-0.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6632f2d04f15d1bd6fe0eedd3b86d9061b836ddca4c03d5cf5c7e9e6b7c14580"},
+ {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d0b67d87bb45ed1cd020e8fbf2307d449b68abc45402fe1a4ac9e46c3c8b192b"},
+ {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ec31a99ca63bf3cd7f1a5ac9fe95c5e2d060d3c768a09bc1d16e235840861420"},
+ {file = "rpds_py-0.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22e6c9976e38f4d8c4a63bd8a8edac5307dffd3ee7e6026d97f3cc3a2dc02a0b"},
+ {file = "rpds_py-0.20.0-cp39-none-win32.whl", hash = "sha256:569b3ea770c2717b730b61998b6c54996adee3cef69fc28d444f3e7920313cf7"},
+ {file = "rpds_py-0.20.0-cp39-none-win_amd64.whl", hash = "sha256:e6900ecdd50ce0facf703f7a00df12374b74bbc8ad9fe0f6559947fb20f82364"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:617c7357272c67696fd052811e352ac54ed1d9b49ab370261a80d3b6ce385045"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9426133526f69fcaba6e42146b4e12d6bc6c839b8b555097020e2b78ce908dcc"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deb62214c42a261cb3eb04d474f7155279c1a8a8c30ac89b7dcb1721d92c3c02"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcaeb7b57f1a1e071ebd748984359fef83ecb026325b9d4ca847c95bc7311c92"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d454b8749b4bd70dd0a79f428731ee263fa6995f83ccb8bada706e8d1d3ff89d"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d807dc2051abe041b6649681dce568f8e10668e3c1c6543ebae58f2d7e617855"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c20f0ddeb6e29126d45f89206b8291352b8c5b44384e78a6499d68b52ae511"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b7f19250ceef892adf27f0399b9e5afad019288e9be756d6919cb58892129f51"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:4f1ed4749a08379555cebf4650453f14452eaa9c43d0a95c49db50c18b7da075"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dcedf0b42bcb4cfff4101d7771a10532415a6106062f005ab97d1d0ab5681c60"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:39ed0d010457a78f54090fafb5d108501b5aa5604cc22408fc1c0c77eac14344"},
+ {file = "rpds_py-0.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:bb273176be34a746bdac0b0d7e4e2c467323d13640b736c4c477881a3220a989"},
+ {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f918a1a130a6dfe1d7fe0f105064141342e7dd1611f2e6a21cd2f5c8cb1cfb3e"},
+ {file = "rpds_py-0.20.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f60012a73aa396be721558caa3a6fd49b3dd0033d1675c6d59c4502e870fcf0c"},
+ {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d2b1ad682a3dfda2a4e8ad8572f3100f95fad98cb99faf37ff0ddfe9cbf9d03"},
+ {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:614fdafe9f5f19c63ea02817fa4861c606a59a604a77c8cdef5aa01d28b97921"},
+ {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fa518bcd7600c584bf42e6617ee8132869e877db2f76bcdc281ec6a4113a53ab"},
+ {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0475242f447cc6cb8a9dd486d68b2ef7fbee84427124c232bff5f63b1fe11e5"},
+ {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f90a4cd061914a60bd51c68bcb4357086991bd0bb93d8aa66a6da7701370708f"},
+ {file = "rpds_py-0.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:def7400461c3a3f26e49078302e1c1b38f6752342c77e3cf72ce91ca69fb1bc1"},
+ {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:65794e4048ee837494aea3c21a28ad5fc080994dfba5b036cf84de37f7ad5074"},
+ {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:faefcc78f53a88f3076b7f8be0a8f8d35133a3ecf7f3770895c25f8813460f08"},
+ {file = "rpds_py-0.20.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5b4f105deeffa28bbcdff6c49b34e74903139afa690e35d2d9e3c2c2fba18cec"},
+ {file = "rpds_py-0.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fdfc3a892927458d98f3d55428ae46b921d1f7543b89382fdb483f5640daaec8"},
+ {file = "rpds_py-0.20.0.tar.gz", hash = "sha256:d72a210824facfdaf8768cf2d7ca25a042c30320b3020de2fa04640920d4e121"},
+]
+
+[[package]]
+name = "rplidar-roboticia"
+version = "0.9.5"
+description = "Simple and lightweight module for working with RPLidar laser scanners"
+optional = true
+python-versions = "*"
+files = [
+ {file = "rplidar-roboticia-0.9.5.tar.gz", hash = "sha256:709e9143f7701d69e8439231b065e676f7d5a6086cd2922113b055bedf99f0e3"},
+]
+
+[package.dependencies]
+pyserial = "*"
+
[[package]]
name = "safetensors"
-version = "0.4.3"
+version = "0.4.5"
description = ""
optional = false
python-versions = ">=3.7"
files = [
- {file = "safetensors-0.4.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:dcf5705cab159ce0130cd56057f5f3425023c407e170bca60b4868048bae64fd"},
- {file = "safetensors-0.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bb4f8c5d0358a31e9a08daeebb68f5e161cdd4018855426d3f0c23bb51087055"},
- {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70a5319ef409e7f88686a46607cbc3c428271069d8b770076feaf913664a07ac"},
- {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fb9c65bd82f9ef3ce4970dc19ee86be5f6f93d032159acf35e663c6bea02b237"},
- {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:edb5698a7bc282089f64c96c477846950358a46ede85a1c040e0230344fdde10"},
- {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efcc860be094b8d19ac61b452ec635c7acb9afa77beb218b1d7784c6d41fe8ad"},
- {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d88b33980222085dd6001ae2cad87c6068e0991d4f5ccf44975d216db3b57376"},
- {file = "safetensors-0.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5fc6775529fb9f0ce2266edd3e5d3f10aab068e49f765e11f6f2a63b5367021d"},
- {file = "safetensors-0.4.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9c6ad011c1b4e3acff058d6b090f1da8e55a332fbf84695cf3100c649cc452d1"},
- {file = "safetensors-0.4.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c496c5401c1b9c46d41a7688e8ff5b0310a3b9bae31ce0f0ae870e1ea2b8caf"},
- {file = "safetensors-0.4.3-cp310-none-win32.whl", hash = "sha256:38e2a8666178224a51cca61d3cb4c88704f696eac8f72a49a598a93bbd8a4af9"},
- {file = "safetensors-0.4.3-cp310-none-win_amd64.whl", hash = "sha256:393e6e391467d1b2b829c77e47d726f3b9b93630e6a045b1d1fca67dc78bf632"},
- {file = "safetensors-0.4.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:22f3b5d65e440cec0de8edaa672efa888030802e11c09b3d6203bff60ebff05a"},
- {file = "safetensors-0.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c4fa560ebd4522adddb71dcd25d09bf211b5634003f015a4b815b7647d62ebe"},
- {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9afd5358719f1b2cf425fad638fc3c887997d6782da317096877e5b15b2ce93"},
- {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d8c5093206ef4b198600ae484230402af6713dab1bd5b8e231905d754022bec7"},
- {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0b2104df1579d6ba9052c0ae0e3137c9698b2d85b0645507e6fd1813b70931a"},
- {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8cf18888606dad030455d18f6c381720e57fc6a4170ee1966adb7ebc98d4d6a3"},
- {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0bf4f9d6323d9f86eef5567eabd88f070691cf031d4c0df27a40d3b4aaee755b"},
- {file = "safetensors-0.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:585c9ae13a205807b63bef8a37994f30c917ff800ab8a1ca9c9b5d73024f97ee"},
- {file = "safetensors-0.4.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faefeb3b81bdfb4e5a55b9bbdf3d8d8753f65506e1d67d03f5c851a6c87150e9"},
- {file = "safetensors-0.4.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:befdf0167ad626f22f6aac6163477fcefa342224a22f11fdd05abb3995c1783c"},
- {file = "safetensors-0.4.3-cp311-none-win32.whl", hash = "sha256:a7cef55929dcbef24af3eb40bedec35d82c3c2fa46338bb13ecf3c5720af8a61"},
- {file = "safetensors-0.4.3-cp311-none-win_amd64.whl", hash = "sha256:840b7ac0eff5633e1d053cc9db12fdf56b566e9403b4950b2dc85393d9b88d67"},
- {file = "safetensors-0.4.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:22d21760dc6ebae42e9c058d75aa9907d9f35e38f896e3c69ba0e7b213033856"},
- {file = "safetensors-0.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d22c1a10dff3f64d0d68abb8298a3fd88ccff79f408a3e15b3e7f637ef5c980"},
- {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1648568667f820b8c48317c7006221dc40aced1869908c187f493838a1362bc"},
- {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:446e9fe52c051aeab12aac63d1017e0f68a02a92a027b901c4f8e931b24e5397"},
- {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fef5d70683643618244a4f5221053567ca3e77c2531e42ad48ae05fae909f542"},
- {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a1f4430cc0c9d6afa01214a4b3919d0a029637df8e09675ceef1ca3f0dfa0df"},
- {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d603846a8585b9432a0fd415db1d4c57c0f860eb4aea21f92559ff9902bae4d"},
- {file = "safetensors-0.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a844cdb5d7cbc22f5f16c7e2a0271170750763c4db08381b7f696dbd2c78a361"},
- {file = "safetensors-0.4.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:88887f69f7a00cf02b954cdc3034ffb383b2303bc0ab481d4716e2da51ddc10e"},
- {file = "safetensors-0.4.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ee463219d9ec6c2be1d331ab13a8e0cd50d2f32240a81d498266d77d07b7e71e"},
- {file = "safetensors-0.4.3-cp312-none-win32.whl", hash = "sha256:d0dd4a1db09db2dba0f94d15addc7e7cd3a7b0d393aa4c7518c39ae7374623c3"},
- {file = "safetensors-0.4.3-cp312-none-win_amd64.whl", hash = "sha256:d14d30c25897b2bf19b6fb5ff7e26cc40006ad53fd4a88244fdf26517d852dd7"},
- {file = "safetensors-0.4.3-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:d1456f814655b224d4bf6e7915c51ce74e389b413be791203092b7ff78c936dd"},
- {file = "safetensors-0.4.3-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:455d538aa1aae4a8b279344a08136d3f16334247907b18a5c3c7fa88ef0d3c46"},
- {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf476bca34e1340ee3294ef13e2c625833f83d096cfdf69a5342475602004f95"},
- {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:02ef3a24face643456020536591fbd3c717c5abaa2737ec428ccbbc86dffa7a4"},
- {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7de32d0d34b6623bb56ca278f90db081f85fb9c5d327e3c18fd23ac64f465768"},
- {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a0deb16a1d3ea90c244ceb42d2c6c276059616be21a19ac7101aa97da448faf"},
- {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c59d51f182c729f47e841510b70b967b0752039f79f1de23bcdd86462a9b09ee"},
- {file = "safetensors-0.4.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1f598b713cc1a4eb31d3b3203557ac308acf21c8f41104cdd74bf640c6e538e3"},
- {file = "safetensors-0.4.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:5757e4688f20df083e233b47de43845d1adb7e17b6cf7da5f8444416fc53828d"},
- {file = "safetensors-0.4.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fe746d03ed8d193674a26105e4f0fe6c726f5bb602ffc695b409eaf02f04763d"},
- {file = "safetensors-0.4.3-cp37-none-win32.whl", hash = "sha256:0d5ffc6a80f715c30af253e0e288ad1cd97a3d0086c9c87995e5093ebc075e50"},
- {file = "safetensors-0.4.3-cp37-none-win_amd64.whl", hash = "sha256:a11c374eb63a9c16c5ed146457241182f310902bd2a9c18255781bb832b6748b"},
- {file = "safetensors-0.4.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b1e31be7945f66be23f4ec1682bb47faa3df34cb89fc68527de6554d3c4258a4"},
- {file = "safetensors-0.4.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:03a4447c784917c9bf01d8f2ac5080bc15c41692202cd5f406afba16629e84d6"},
- {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d244bcafeb1bc06d47cfee71727e775bca88a8efda77a13e7306aae3813fa7e4"},
- {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53c4879b9c6bd7cd25d114ee0ef95420e2812e676314300624594940a8d6a91f"},
- {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74707624b81f1b7f2b93f5619d4a9f00934d5948005a03f2c1845ffbfff42212"},
- {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0d52c958dc210265157573f81d34adf54e255bc2b59ded6218500c9b15a750eb"},
- {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f9568f380f513a60139971169c4a358b8731509cc19112369902eddb33faa4d"},
- {file = "safetensors-0.4.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0d9cd8e1560dfc514b6d7859247dc6a86ad2f83151a62c577428d5102d872721"},
- {file = "safetensors-0.4.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:89f9f17b0dacb913ed87d57afbc8aad85ea42c1085bd5de2f20d83d13e9fc4b2"},
- {file = "safetensors-0.4.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:1139eb436fd201c133d03c81209d39ac57e129f5e74e34bb9ab60f8d9b726270"},
- {file = "safetensors-0.4.3-cp38-none-win32.whl", hash = "sha256:d9c289f140a9ae4853fc2236a2ffc9a9f2d5eae0cb673167e0f1b8c18c0961ac"},
- {file = "safetensors-0.4.3-cp38-none-win_amd64.whl", hash = "sha256:622afd28968ef3e9786562d352659a37de4481a4070f4ebac883f98c5836563e"},
- {file = "safetensors-0.4.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8651c7299cbd8b4161a36cd6a322fa07d39cd23535b144d02f1c1972d0c62f3c"},
- {file = "safetensors-0.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e375d975159ac534c7161269de24ddcd490df2157b55c1a6eeace6cbb56903f0"},
- {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:084fc436e317f83f7071fc6a62ca1c513b2103db325cd09952914b50f51cf78f"},
- {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:41a727a7f5e6ad9f1db6951adee21bbdadc632363d79dc434876369a17de6ad6"},
- {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7dbbde64b6c534548696808a0e01276d28ea5773bc9a2dfb97a88cd3dffe3df"},
- {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bbae3b4b9d997971431c346edbfe6e41e98424a097860ee872721e176040a893"},
- {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01e4b22e3284cd866edeabe4f4d896229495da457229408d2e1e4810c5187121"},
- {file = "safetensors-0.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dd37306546b58d3043eb044c8103a02792cc024b51d1dd16bd3dd1f334cb3ed"},
- {file = "safetensors-0.4.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d8815b5e1dac85fc534a97fd339e12404db557878c090f90442247e87c8aeaea"},
- {file = "safetensors-0.4.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e011cc162503c19f4b1fd63dfcddf73739c7a243a17dac09b78e57a00983ab35"},
- {file = "safetensors-0.4.3-cp39-none-win32.whl", hash = "sha256:01feb3089e5932d7e662eda77c3ecc389f97c0883c4a12b5cfdc32b589a811c3"},
- {file = "safetensors-0.4.3-cp39-none-win_amd64.whl", hash = "sha256:3f9cdca09052f585e62328c1c2923c70f46814715c795be65f0b93f57ec98a02"},
- {file = "safetensors-0.4.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1b89381517891a7bb7d1405d828b2bf5d75528299f8231e9346b8eba092227f9"},
- {file = "safetensors-0.4.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:cd6fff9e56df398abc5866b19a32124815b656613c1c5ec0f9350906fd798aac"},
- {file = "safetensors-0.4.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:840caf38d86aa7014fe37ade5d0d84e23dcfbc798b8078015831996ecbc206a3"},
- {file = "safetensors-0.4.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9650713b2cfa9537a2baf7dd9fee458b24a0aaaa6cafcea8bdd5fb2b8efdc34"},
- {file = "safetensors-0.4.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e4119532cd10dba04b423e0f86aecb96cfa5a602238c0aa012f70c3a40c44b50"},
- {file = "safetensors-0.4.3-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e066e8861eef6387b7c772344d1fe1f9a72800e04ee9a54239d460c400c72aab"},
- {file = "safetensors-0.4.3-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:90964917f5b0fa0fa07e9a051fbef100250c04d150b7026ccbf87a34a54012e0"},
- {file = "safetensors-0.4.3-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c41e1893d1206aa7054029681778d9a58b3529d4c807002c156d58426c225173"},
- {file = "safetensors-0.4.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae7613a119a71a497d012ccc83775c308b9c1dab454806291427f84397d852fd"},
- {file = "safetensors-0.4.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9bac020faba7f5dc481e881b14b6425265feabb5bfc552551d21189c0eddc3"},
- {file = "safetensors-0.4.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:420a98f593ff9930f5822560d14c395ccbc57342ddff3b463bc0b3d6b1951550"},
- {file = "safetensors-0.4.3-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:f5e6883af9a68c0028f70a4c19d5a6ab6238a379be36ad300a22318316c00cb0"},
- {file = "safetensors-0.4.3-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:cdd0a3b5da66e7f377474599814dbf5cbf135ff059cc73694de129b58a5e8a2c"},
- {file = "safetensors-0.4.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9bfb92f82574d9e58401d79c70c716985dc049b635fef6eecbb024c79b2c46ad"},
- {file = "safetensors-0.4.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:3615a96dd2dcc30eb66d82bc76cda2565f4f7bfa89fcb0e31ba3cea8a1a9ecbb"},
- {file = "safetensors-0.4.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:868ad1b6fc41209ab6bd12f63923e8baeb1a086814cb2e81a65ed3d497e0cf8f"},
- {file = "safetensors-0.4.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7ffba80aa49bd09195145a7fd233a7781173b422eeb995096f2b30591639517"},
- {file = "safetensors-0.4.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0acbe31340ab150423347e5b9cc595867d814244ac14218932a5cf1dd38eb39"},
- {file = "safetensors-0.4.3-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19bbdf95de2cf64f25cd614c5236c8b06eb2cfa47cbf64311f4b5d80224623a3"},
- {file = "safetensors-0.4.3-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b852e47eb08475c2c1bd8131207b405793bfc20d6f45aff893d3baaad449ed14"},
- {file = "safetensors-0.4.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5d07cbca5b99babb692d76d8151bec46f461f8ad8daafbfd96b2fca40cadae65"},
- {file = "safetensors-0.4.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1ab6527a20586d94291c96e00a668fa03f86189b8a9defa2cdd34a1a01acc7d5"},
- {file = "safetensors-0.4.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02318f01e332cc23ffb4f6716e05a492c5f18b1d13e343c49265149396284a44"},
- {file = "safetensors-0.4.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec4b52ce9a396260eb9731eb6aea41a7320de22ed73a1042c2230af0212758ce"},
- {file = "safetensors-0.4.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:018b691383026a2436a22b648873ed11444a364324e7088b99cd2503dd828400"},
- {file = "safetensors-0.4.3-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:309b10dbcab63269ecbf0e2ca10ce59223bb756ca5d431ce9c9eeabd446569da"},
- {file = "safetensors-0.4.3-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b277482120df46e27a58082df06a15aebda4481e30a1c21eefd0921ae7e03f65"},
- {file = "safetensors-0.4.3.tar.gz", hash = "sha256:2f85fc50c4e07a21e95c24e07460fe6f7e2859d0ce88092838352b798ce711c2"},
+ {file = "safetensors-0.4.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a63eaccd22243c67e4f2b1c3e258b257effc4acd78f3b9d397edc8cf8f1298a7"},
+ {file = "safetensors-0.4.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:23fc9b4ec7b602915cbb4ec1a7c1ad96d2743c322f20ab709e2c35d1b66dad27"},
+ {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6885016f34bef80ea1085b7e99b3c1f92cb1be78a49839203060f67b40aee761"},
+ {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:133620f443450429322f238fda74d512c4008621227fccf2f8cf4a76206fea7c"},
+ {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4fb3e0609ec12d2a77e882f07cced530b8262027f64b75d399f1504ffec0ba56"},
+ {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0f1dd769f064adc33831f5e97ad07babbd728427f98e3e1db6902e369122737"},
+ {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6d156bdb26732feada84f9388a9f135528c1ef5b05fae153da365ad4319c4c5"},
+ {file = "safetensors-0.4.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9e347d77e2c77eb7624400ccd09bed69d35c0332f417ce8c048d404a096c593b"},
+ {file = "safetensors-0.4.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9f556eea3aec1d3d955403159fe2123ddd68e880f83954ee9b4a3f2e15e716b6"},
+ {file = "safetensors-0.4.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9483f42be3b6bc8ff77dd67302de8ae411c4db39f7224dec66b0eb95822e4163"},
+ {file = "safetensors-0.4.5-cp310-none-win32.whl", hash = "sha256:7389129c03fadd1ccc37fd1ebbc773f2b031483b04700923c3511d2a939252cc"},
+ {file = "safetensors-0.4.5-cp310-none-win_amd64.whl", hash = "sha256:e98ef5524f8b6620c8cdef97220c0b6a5c1cef69852fcd2f174bb96c2bb316b1"},
+ {file = "safetensors-0.4.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:21f848d7aebd5954f92538552d6d75f7c1b4500f51664078b5b49720d180e47c"},
+ {file = "safetensors-0.4.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bb07000b19d41e35eecef9a454f31a8b4718a185293f0d0b1c4b61d6e4487971"},
+ {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09dedf7c2fda934ee68143202acff6e9e8eb0ddeeb4cfc24182bef999efa9f42"},
+ {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:59b77e4b7a708988d84f26de3ebead61ef1659c73dcbc9946c18f3b1786d2688"},
+ {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d3bc83e14d67adc2e9387e511097f254bd1b43c3020440e708858c684cbac68"},
+ {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39371fc551c1072976073ab258c3119395294cf49cdc1f8476794627de3130df"},
+ {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6c19feda32b931cae0acd42748a670bdf56bee6476a046af20181ad3fee4090"},
+ {file = "safetensors-0.4.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a659467495de201e2f282063808a41170448c78bada1e62707b07a27b05e6943"},
+ {file = "safetensors-0.4.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bad5e4b2476949bcd638a89f71b6916fa9a5cae5c1ae7eede337aca2100435c0"},
+ {file = "safetensors-0.4.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a3a315a6d0054bc6889a17f5668a73f94f7fe55121ff59e0a199e3519c08565f"},
+ {file = "safetensors-0.4.5-cp311-none-win32.whl", hash = "sha256:a01e232e6d3d5cf8b1667bc3b657a77bdab73f0743c26c1d3c5dd7ce86bd3a92"},
+ {file = "safetensors-0.4.5-cp311-none-win_amd64.whl", hash = "sha256:cbd39cae1ad3e3ef6f63a6f07296b080c951f24cec60188378e43d3713000c04"},
+ {file = "safetensors-0.4.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:473300314e026bd1043cef391bb16a8689453363381561b8a3e443870937cc1e"},
+ {file = "safetensors-0.4.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:801183a0f76dc647f51a2d9141ad341f9665602a7899a693207a82fb102cc53e"},
+ {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1524b54246e422ad6fb6aea1ac71edeeb77666efa67230e1faf6999df9b2e27f"},
+ {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b3139098e3e8b2ad7afbca96d30ad29157b50c90861084e69fcb80dec7430461"},
+ {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65573dc35be9059770808e276b017256fa30058802c29e1038eb1c00028502ea"},
+ {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fd33da8e9407559f8779c82a0448e2133737f922d71f884da27184549416bfed"},
+ {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3685ce7ed036f916316b567152482b7e959dc754fcc4a8342333d222e05f407c"},
+ {file = "safetensors-0.4.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dde2bf390d25f67908278d6f5d59e46211ef98e44108727084d4637ee70ab4f1"},
+ {file = "safetensors-0.4.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7469d70d3de970b1698d47c11ebbf296a308702cbaae7fcb993944751cf985f4"},
+ {file = "safetensors-0.4.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a6ba28118636a130ccbb968bc33d4684c48678695dba2590169d5ab03a45646"},
+ {file = "safetensors-0.4.5-cp312-none-win32.whl", hash = "sha256:c859c7ed90b0047f58ee27751c8e56951452ed36a67afee1b0a87847d065eec6"},
+ {file = "safetensors-0.4.5-cp312-none-win_amd64.whl", hash = "sha256:b5a8810ad6a6f933fff6c276eae92c1da217b39b4d8b1bc1c0b8af2d270dc532"},
+ {file = "safetensors-0.4.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:25e5f8e2e92a74f05b4ca55686234c32aac19927903792b30ee6d7bd5653d54e"},
+ {file = "safetensors-0.4.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:81efb124b58af39fcd684254c645e35692fea81c51627259cdf6d67ff4458916"},
+ {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:585f1703a518b437f5103aa9cf70e9bd437cb78eea9c51024329e4fb8a3e3679"},
+ {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4b99fbf72e3faf0b2f5f16e5e3458b93b7d0a83984fe8d5364c60aa169f2da89"},
+ {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b17b299ca9966ca983ecda1c0791a3f07f9ca6ab5ded8ef3d283fff45f6bcd5f"},
+ {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76ded72f69209c9780fdb23ea89e56d35c54ae6abcdec67ccb22af8e696e449a"},
+ {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2783956926303dcfeb1de91a4d1204cd4089ab441e622e7caee0642281109db3"},
+ {file = "safetensors-0.4.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d94581aab8c6b204def4d7320f07534d6ee34cd4855688004a4354e63b639a35"},
+ {file = "safetensors-0.4.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:67e1e7cb8678bb1b37ac48ec0df04faf689e2f4e9e81e566b5c63d9f23748523"},
+ {file = "safetensors-0.4.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:dbd280b07e6054ea68b0cb4b16ad9703e7d63cd6890f577cb98acc5354780142"},
+ {file = "safetensors-0.4.5-cp37-cp37m-macosx_10_12_x86_64.whl", hash = "sha256:77d9b228da8374c7262046a36c1f656ba32a93df6cc51cd4453af932011e77f1"},
+ {file = "safetensors-0.4.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:500cac01d50b301ab7bb192353317035011c5ceeef0fca652f9f43c000bb7f8d"},
+ {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75331c0c746f03158ded32465b7d0b0e24c5a22121743662a2393439c43a45cf"},
+ {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:670e95fe34e0d591d0529e5e59fd9d3d72bc77b1444fcaa14dccda4f36b5a38b"},
+ {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:098923e2574ff237c517d6e840acada8e5b311cb1fa226019105ed82e9c3b62f"},
+ {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13ca0902d2648775089fa6a0c8fc9e6390c5f8ee576517d33f9261656f851e3f"},
+ {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f0032bedc869c56f8d26259fe39cd21c5199cd57f2228d817a0e23e8370af25"},
+ {file = "safetensors-0.4.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f4b15f51b4f8f2a512341d9ce3475cacc19c5fdfc5db1f0e19449e75f95c7dc8"},
+ {file = "safetensors-0.4.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f6594d130d0ad933d885c6a7b75c5183cb0e8450f799b80a39eae2b8508955eb"},
+ {file = "safetensors-0.4.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:60c828a27e852ded2c85fc0f87bf1ec20e464c5cd4d56ff0e0711855cc2e17f8"},
+ {file = "safetensors-0.4.5-cp37-none-win32.whl", hash = "sha256:6d3de65718b86c3eeaa8b73a9c3d123f9307a96bbd7be9698e21e76a56443af5"},
+ {file = "safetensors-0.4.5-cp37-none-win_amd64.whl", hash = "sha256:5a2d68a523a4cefd791156a4174189a4114cf0bf9c50ceb89f261600f3b2b81a"},
+ {file = "safetensors-0.4.5-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:e7a97058f96340850da0601a3309f3d29d6191b0702b2da201e54c6e3e44ccf0"},
+ {file = "safetensors-0.4.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:63bfd425e25f5c733f572e2246e08a1c38bd6f2e027d3f7c87e2e43f228d1345"},
+ {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3664ac565d0e809b0b929dae7ccd74e4d3273cd0c6d1220c6430035befb678e"},
+ {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:313514b0b9b73ff4ddfb4edd71860696dbe3c1c9dc4d5cc13dbd74da283d2cbf"},
+ {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31fa33ee326f750a2f2134a6174773c281d9a266ccd000bd4686d8021f1f3dac"},
+ {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09566792588d77b68abe53754c9f1308fadd35c9f87be939e22c623eaacbed6b"},
+ {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309aaec9b66cbf07ad3a2e5cb8a03205663324fea024ba391594423d0f00d9fe"},
+ {file = "safetensors-0.4.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:53946c5813b8f9e26103c5efff4a931cc45d874f45229edd68557ffb35ffb9f8"},
+ {file = "safetensors-0.4.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:868f9df9e99ad1e7f38c52194063a982bc88fedc7d05096f4f8160403aaf4bd6"},
+ {file = "safetensors-0.4.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9cc9449bd0b0bc538bd5e268221f0c5590bc5c14c1934a6ae359d44410dc68c4"},
+ {file = "safetensors-0.4.5-cp38-none-win32.whl", hash = "sha256:83c4f13a9e687335c3928f615cd63a37e3f8ef072a3f2a0599fa09f863fb06a2"},
+ {file = "safetensors-0.4.5-cp38-none-win_amd64.whl", hash = "sha256:b98d40a2ffa560653f6274e15b27b3544e8e3713a44627ce268f419f35c49478"},
+ {file = "safetensors-0.4.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:cf727bb1281d66699bef5683b04d98c894a2803442c490a8d45cd365abfbdeb2"},
+ {file = "safetensors-0.4.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96f1d038c827cdc552d97e71f522e1049fef0542be575421f7684756a748e457"},
+ {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:139fbee92570ecea774e6344fee908907db79646d00b12c535f66bc78bd5ea2c"},
+ {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c36302c1c69eebb383775a89645a32b9d266878fab619819ce660309d6176c9b"},
+ {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d641f5b8149ea98deb5ffcf604d764aad1de38a8285f86771ce1abf8e74c4891"},
+ {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b4db6a61d968de73722b858038c616a1bebd4a86abe2688e46ca0cc2d17558f2"},
+ {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b75a616e02f21b6f1d5785b20cecbab5e2bd3f6358a90e8925b813d557666ec1"},
+ {file = "safetensors-0.4.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:788ee7d04cc0e0e7f944c52ff05f52a4415b312f5efd2ee66389fb7685ee030c"},
+ {file = "safetensors-0.4.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:87bc42bd04fd9ca31396d3ca0433db0be1411b6b53ac5a32b7845a85d01ffc2e"},
+ {file = "safetensors-0.4.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4037676c86365a721a8c9510323a51861d703b399b78a6b4486a54a65a975fca"},
+ {file = "safetensors-0.4.5-cp39-none-win32.whl", hash = "sha256:1500418454529d0ed5c1564bda376c4ddff43f30fce9517d9bee7bcce5a8ef50"},
+ {file = "safetensors-0.4.5-cp39-none-win_amd64.whl", hash = "sha256:9d1a94b9d793ed8fe35ab6d5cea28d540a46559bafc6aae98f30ee0867000cab"},
+ {file = "safetensors-0.4.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fdadf66b5a22ceb645d5435a0be7a0292ce59648ca1d46b352f13cff3ea80410"},
+ {file = "safetensors-0.4.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d42ffd4c2259f31832cb17ff866c111684c87bd930892a1ba53fed28370c918c"},
+ {file = "safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd8a1f6d2063a92cd04145c7fd9e31a1c7d85fbec20113a14b487563fdbc0597"},
+ {file = "safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:951d2fcf1817f4fb0ef0b48f6696688a4e852a95922a042b3f96aaa67eedc920"},
+ {file = "safetensors-0.4.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6ac85d9a8c1af0e3132371d9f2d134695a06a96993c2e2f0bbe25debb9e3f67a"},
+ {file = "safetensors-0.4.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:e3cec4a29eb7fe8da0b1c7988bc3828183080439dd559f720414450de076fcab"},
+ {file = "safetensors-0.4.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:21742b391b859e67b26c0b2ac37f52c9c0944a879a25ad2f9f9f3cd61e7fda8f"},
+ {file = "safetensors-0.4.5-pp37-pypy37_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c7db3006a4915151ce1913652e907cdede299b974641a83fbc092102ac41b644"},
+ {file = "safetensors-0.4.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f68bf99ea970960a237f416ea394e266e0361895753df06e3e06e6ea7907d98b"},
+ {file = "safetensors-0.4.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8158938cf3324172df024da511839d373c40fbfaa83e9abf467174b2910d7b4c"},
+ {file = "safetensors-0.4.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:540ce6c4bf6b58cb0fd93fa5f143bc0ee341c93bb4f9287ccd92cf898cc1b0dd"},
+ {file = "safetensors-0.4.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bfeaa1a699c6b9ed514bd15e6a91e74738b71125a9292159e3d6b7f0a53d2cde"},
+ {file = "safetensors-0.4.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:01c8f00da537af711979e1b42a69a8ec9e1d7112f208e0e9b8a35d2c381085ef"},
+ {file = "safetensors-0.4.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a0dd565f83b30f2ca79b5d35748d0d99dd4b3454f80e03dfb41f0038e3bdf180"},
+ {file = "safetensors-0.4.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:023b6e5facda76989f4cba95a861b7e656b87e225f61811065d5c501f78cdb3f"},
+ {file = "safetensors-0.4.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9633b663393d5796f0b60249549371e392b75a0b955c07e9c6f8708a87fc841f"},
+ {file = "safetensors-0.4.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78dd8adfb48716233c45f676d6e48534d34b4bceb50162c13d1f0bdf6f78590a"},
+ {file = "safetensors-0.4.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8e8deb16c4321d61ae72533b8451ec4a9af8656d1c61ff81aa49f966406e4b68"},
+ {file = "safetensors-0.4.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:52452fa5999dc50c4decaf0c53aa28371f7f1e0fe5c2dd9129059fbe1e1599c7"},
+ {file = "safetensors-0.4.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d5f23198821e227cfc52d50fa989813513db381255c6d100927b012f0cfec63d"},
+ {file = "safetensors-0.4.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f4beb84b6073b1247a773141a6331117e35d07134b3bb0383003f39971d414bb"},
+ {file = "safetensors-0.4.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:68814d599d25ed2fdd045ed54d370d1d03cf35e02dce56de44c651f828fb9b7b"},
+ {file = "safetensors-0.4.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0b6453c54c57c1781292c46593f8a37254b8b99004c68d6c3ce229688931a22"},
+ {file = "safetensors-0.4.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:adaa9c6dead67e2dd90d634f89131e43162012479d86e25618e821a03d1eb1dc"},
+ {file = "safetensors-0.4.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:73e7d408e9012cd17511b382b43547850969c7979efc2bc353f317abaf23c84c"},
+ {file = "safetensors-0.4.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:775409ce0fcc58b10773fdb4221ed1eb007de10fe7adbdf8f5e8a56096b6f0bc"},
+ {file = "safetensors-0.4.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:834001bed193e4440c4a3950a31059523ee5090605c907c66808664c932b549c"},
+ {file = "safetensors-0.4.5.tar.gz", hash = "sha256:d73de19682deabb02524b3d5d1f8b3aaba94c72f1bbfc7911b9b9d5d391c0310"},
]
[package.extras]
@@ -3689,36 +6264,44 @@ test = ["asv", "numpydoc (>=1.7)", "pooch (>=1.6.0)", "pytest (>=7.0)", "pytest-
[[package]]
name = "scipy"
-version = "1.14.0"
+version = "1.14.1"
description = "Fundamental algorithms for scientific computing in Python"
optional = true
python-versions = ">=3.10"
files = [
- {file = "scipy-1.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7e911933d54ead4d557c02402710c2396529540b81dd554fc1ba270eb7308484"},
- {file = "scipy-1.14.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:687af0a35462402dd851726295c1a5ae5f987bd6e9026f52e9505994e2f84ef6"},
- {file = "scipy-1.14.0-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:07e179dc0205a50721022344fb85074f772eadbda1e1b3eecdc483f8033709b7"},
- {file = "scipy-1.14.0-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:6a9c9a9b226d9a21e0a208bdb024c3982932e43811b62d202aaf1bb59af264b1"},
- {file = "scipy-1.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:076c27284c768b84a45dcf2e914d4000aac537da74236a0d45d82c6fa4b7b3c0"},
- {file = "scipy-1.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42470ea0195336df319741e230626b6225a740fd9dce9642ca13e98f667047c0"},
- {file = "scipy-1.14.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:176c6f0d0470a32f1b2efaf40c3d37a24876cebf447498a4cefb947a79c21e9d"},
- {file = "scipy-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:ad36af9626d27a4326c8e884917b7ec321d8a1841cd6dacc67d2a9e90c2f0359"},
- {file = "scipy-1.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6d056a8709ccda6cf36cdd2eac597d13bc03dba38360f418560a93050c76a16e"},
- {file = "scipy-1.14.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:f0a50da861a7ec4573b7c716b2ebdcdf142b66b756a0d392c236ae568b3a93fb"},
- {file = "scipy-1.14.0-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:94c164a9e2498e68308e6e148646e486d979f7fcdb8b4cf34b5441894bdb9caf"},
- {file = "scipy-1.14.0-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:a7d46c3e0aea5c064e734c3eac5cf9eb1f8c4ceee756262f2c7327c4c2691c86"},
- {file = "scipy-1.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eee2989868e274aae26125345584254d97c56194c072ed96cb433f32f692ed8"},
- {file = "scipy-1.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3154691b9f7ed73778d746da2df67a19d046a6c8087c8b385bc4cdb2cfca74"},
- {file = "scipy-1.14.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c40003d880f39c11c1edbae8144e3813904b10514cd3d3d00c277ae996488cdb"},
- {file = "scipy-1.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:5b083c8940028bb7e0b4172acafda6df762da1927b9091f9611b0bcd8676f2bc"},
- {file = "scipy-1.14.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:bff2438ea1330e06e53c424893ec0072640dac00f29c6a43a575cbae4c99b2b9"},
- {file = "scipy-1.14.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:bbc0471b5f22c11c389075d091d3885693fd3f5e9a54ce051b46308bc787e5d4"},
- {file = "scipy-1.14.0-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:64b2ff514a98cf2bb734a9f90d32dc89dc6ad4a4a36a312cd0d6327170339eb0"},
- {file = "scipy-1.14.0-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:7d3da42fbbbb860211a811782504f38ae7aaec9de8764a9bef6b262de7a2b50f"},
- {file = "scipy-1.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d91db2c41dd6c20646af280355d41dfa1ec7eead235642178bd57635a3f82209"},
- {file = "scipy-1.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a01cc03bcdc777c9da3cfdcc74b5a75caffb48a6c39c8450a9a05f82c4250a14"},
- {file = "scipy-1.14.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:65df4da3c12a2bb9ad52b86b4dcf46813e869afb006e58be0f516bc370165159"},
- {file = "scipy-1.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:4c4161597c75043f7154238ef419c29a64ac4a7c889d588ea77690ac4d0d9b20"},
- {file = "scipy-1.14.0.tar.gz", hash = "sha256:b5923f48cb840380f9854339176ef21763118a7300a88203ccd0bdd26e58527b"},
+ {file = "scipy-1.14.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389"},
+ {file = "scipy-1.14.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3"},
+ {file = "scipy-1.14.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0"},
+ {file = "scipy-1.14.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3"},
+ {file = "scipy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d"},
+ {file = "scipy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69"},
+ {file = "scipy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad"},
+ {file = "scipy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5"},
+ {file = "scipy-1.14.1-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675"},
+ {file = "scipy-1.14.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2"},
+ {file = "scipy-1.14.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617"},
+ {file = "scipy-1.14.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8"},
+ {file = "scipy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37"},
+ {file = "scipy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2"},
+ {file = "scipy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2"},
+ {file = "scipy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94"},
+ {file = "scipy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d"},
+ {file = "scipy-1.14.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07"},
+ {file = "scipy-1.14.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5"},
+ {file = "scipy-1.14.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc"},
+ {file = "scipy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310"},
+ {file = "scipy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066"},
+ {file = "scipy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1"},
+ {file = "scipy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f"},
+ {file = "scipy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79"},
+ {file = "scipy-1.14.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e"},
+ {file = "scipy-1.14.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73"},
+ {file = "scipy-1.14.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e"},
+ {file = "scipy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d"},
+ {file = "scipy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e"},
+ {file = "scipy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06"},
+ {file = "scipy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84"},
+ {file = "scipy-1.14.1.tar.gz", hash = "sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417"},
]
[package.dependencies]
@@ -3726,18 +6309,34 @@ numpy = ">=1.23.5,<2.3"
[package.extras]
dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"]
-doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.13.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"]
-test = ["Cython", "array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"]
+doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.13.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<=7.3.7)", "sphinx-design (>=0.4.0)"]
+test = ["Cython", "array-api-strict (>=2.0)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"]
+
+[[package]]
+name = "send2trash"
+version = "1.8.3"
+description = "Send file to trash natively under Mac OS X, Windows and Linux"
+optional = true
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
+files = [
+ {file = "Send2Trash-1.8.3-py3-none-any.whl", hash = "sha256:0c31227e0bd08961c7665474a3d1ef7193929fedda4233843689baa056be46c9"},
+ {file = "Send2Trash-1.8.3.tar.gz", hash = "sha256:b18e7a3966d99871aefeb00cfbcfdced55ce4871194810fc71f4aa484b953abf"},
+]
+
+[package.extras]
+nativelib = ["pyobjc-framework-Cocoa", "pywin32"]
+objc = ["pyobjc-framework-Cocoa"]
+win32 = ["pywin32"]
[[package]]
name = "sentry-sdk"
-version = "2.10.0"
+version = "2.16.0"
description = "Python client for Sentry (https://sentry.io)"
optional = false
python-versions = ">=3.6"
files = [
- {file = "sentry_sdk-2.10.0-py2.py3-none-any.whl", hash = "sha256:87b3d413c87d8e7f816cc9334bff255a83d8b577db2b22042651c30c19c09190"},
- {file = "sentry_sdk-2.10.0.tar.gz", hash = "sha256:545fcc6e36c335faa6d6cda84669b6e17025f31efbf3b2211ec14efe008b75d1"},
+ {file = "sentry_sdk-2.16.0-py2.py3-none-any.whl", hash = "sha256:49139c31ebcd398f4f6396b18910610a0c1602f6e67083240c33019d1f6aa30c"},
+ {file = "sentry_sdk-2.16.0.tar.gz", hash = "sha256:90f733b32e15dfc1999e6b7aca67a38688a567329de4d6e184154a73f96c6892"},
]
[package.dependencies]
@@ -3760,14 +6359,16 @@ falcon = ["falcon (>=1.4)"]
fastapi = ["fastapi (>=0.79.0)"]
flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"]
grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"]
+http2 = ["httpcore[http2] (==1.*)"]
httpx = ["httpx (>=0.16.0)"]
huey = ["huey (>=2)"]
huggingface-hub = ["huggingface-hub (>=0.22)"]
langchain = ["langchain (>=0.0.210)"]
+litestar = ["litestar (>=2.0.0)"]
loguru = ["loguru (>=0.5)"]
openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"]
opentelemetry = ["opentelemetry-distro (>=0.35b0)"]
-opentelemetry-experimental = ["opentelemetry-instrumentation-aio-pika (==0.46b0)", "opentelemetry-instrumentation-aiohttp-client (==0.46b0)", "opentelemetry-instrumentation-aiopg (==0.46b0)", "opentelemetry-instrumentation-asgi (==0.46b0)", "opentelemetry-instrumentation-asyncio (==0.46b0)", "opentelemetry-instrumentation-asyncpg (==0.46b0)", "opentelemetry-instrumentation-aws-lambda (==0.46b0)", "opentelemetry-instrumentation-boto (==0.46b0)", "opentelemetry-instrumentation-boto3sqs (==0.46b0)", "opentelemetry-instrumentation-botocore (==0.46b0)", "opentelemetry-instrumentation-cassandra (==0.46b0)", "opentelemetry-instrumentation-celery (==0.46b0)", "opentelemetry-instrumentation-confluent-kafka (==0.46b0)", "opentelemetry-instrumentation-dbapi (==0.46b0)", "opentelemetry-instrumentation-django (==0.46b0)", "opentelemetry-instrumentation-elasticsearch (==0.46b0)", "opentelemetry-instrumentation-falcon (==0.46b0)", "opentelemetry-instrumentation-fastapi (==0.46b0)", "opentelemetry-instrumentation-flask (==0.46b0)", "opentelemetry-instrumentation-grpc (==0.46b0)", "opentelemetry-instrumentation-httpx (==0.46b0)", "opentelemetry-instrumentation-jinja2 (==0.46b0)", "opentelemetry-instrumentation-kafka-python (==0.46b0)", "opentelemetry-instrumentation-logging (==0.46b0)", "opentelemetry-instrumentation-mysql (==0.46b0)", "opentelemetry-instrumentation-mysqlclient (==0.46b0)", "opentelemetry-instrumentation-pika (==0.46b0)", "opentelemetry-instrumentation-psycopg (==0.46b0)", "opentelemetry-instrumentation-psycopg2 (==0.46b0)", "opentelemetry-instrumentation-pymemcache (==0.46b0)", "opentelemetry-instrumentation-pymongo (==0.46b0)", "opentelemetry-instrumentation-pymysql (==0.46b0)", "opentelemetry-instrumentation-pyramid (==0.46b0)", "opentelemetry-instrumentation-redis (==0.46b0)", "opentelemetry-instrumentation-remoulade (==0.46b0)", "opentelemetry-instrumentation-requests (==0.46b0)", "opentelemetry-instrumentation-sklearn (==0.46b0)", "opentelemetry-instrumentation-sqlalchemy (==0.46b0)", "opentelemetry-instrumentation-sqlite3 (==0.46b0)", "opentelemetry-instrumentation-starlette (==0.46b0)", "opentelemetry-instrumentation-system-metrics (==0.46b0)", "opentelemetry-instrumentation-threading (==0.46b0)", "opentelemetry-instrumentation-tornado (==0.46b0)", "opentelemetry-instrumentation-tortoiseorm (==0.46b0)", "opentelemetry-instrumentation-urllib (==0.46b0)", "opentelemetry-instrumentation-urllib3 (==0.46b0)", "opentelemetry-instrumentation-wsgi (==0.46b0)"]
+opentelemetry-experimental = ["opentelemetry-distro"]
pure-eval = ["asttokens", "executing", "pure-eval"]
pymongo = ["pymongo (>=3.1)"]
pyspark = ["pyspark (>=2.4.4)"]
@@ -3881,63 +6482,84 @@ test = ["pytest"]
[[package]]
name = "setuptools"
-version = "71.0.0"
+version = "75.1.0"
description = "Easily download, build, install, upgrade, and uninstall Python packages"
optional = false
python-versions = ">=3.8"
files = [
- {file = "setuptools-71.0.0-py3-none-any.whl", hash = "sha256:f06fbe978a91819d250a30e0dc4ca79df713d909e24438a42d0ec300fc52247f"},
- {file = "setuptools-71.0.0.tar.gz", hash = "sha256:98da3b8aca443b9848a209ae4165e2edede62633219afa493a58fbba57f72e2e"},
+ {file = "setuptools-75.1.0-py3-none-any.whl", hash = "sha256:35ab7fd3bcd95e6b7fd704e4a1539513edad446c097797f2985e0e4b960772f2"},
+ {file = "setuptools-75.1.0.tar.gz", hash = "sha256:d59a21b17a275fb872a9c3dae73963160ae079f1049ed956880cd7c09b120538"},
]
[package.extras]
-core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
-doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (<7.4)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
-test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.10.0)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"]
+core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"]
+enabler = ["pytest-enabler (>=2.2)"]
+test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"]
+type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"]
+
+[[package]]
+name = "sh"
+version = "2.1.0"
+description = "Python subprocess replacement"
+optional = true
+python-versions = "<4.0,>=3.8.1"
+files = [
+ {file = "sh-2.1.0-py3-none-any.whl", hash = "sha256:bf5e44178dd96a542126c2774e9b7ab1d89bfe0e2ef84d92e6d0ed7358d63d01"},
+ {file = "sh-2.1.0.tar.gz", hash = "sha256:7e27301c574bec8ca5bf6f211851357526455ee97cd27a7c4c6cc5e2375399cb"},
+]
[[package]]
name = "shapely"
-version = "2.0.5"
+version = "2.0.6"
description = "Manipulation and analysis of geometric objects"
optional = true
python-versions = ">=3.7"
files = [
- {file = "shapely-2.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:89d34787c44f77a7d37d55ae821f3a784fa33592b9d217a45053a93ade899375"},
- {file = "shapely-2.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:798090b426142df2c5258779c1d8d5734ec6942f778dab6c6c30cfe7f3bf64ff"},
- {file = "shapely-2.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45211276900c4790d6bfc6105cbf1030742da67594ea4161a9ce6812a6721e68"},
- {file = "shapely-2.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e119444bc27ca33e786772b81760f2028d930ac55dafe9bc50ef538b794a8e1"},
- {file = "shapely-2.0.5-cp310-cp310-win32.whl", hash = "sha256:9a4492a2b2ccbeaebf181e7310d2dfff4fdd505aef59d6cb0f217607cb042fb3"},
- {file = "shapely-2.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:1e5cb5ee72f1bc7ace737c9ecd30dc174a5295fae412972d3879bac2e82c8fae"},
- {file = "shapely-2.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5bbfb048a74cf273db9091ff3155d373020852805a37dfc846ab71dde4be93ec"},
- {file = "shapely-2.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93be600cbe2fbaa86c8eb70656369f2f7104cd231f0d6585c7d0aa555d6878b8"},
- {file = "shapely-2.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8e71bb9a46814019f6644c4e2560a09d44b80100e46e371578f35eaaa9da1c"},
- {file = "shapely-2.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5251c28a29012e92de01d2e84f11637eb1d48184ee8f22e2df6c8c578d26760"},
- {file = "shapely-2.0.5-cp311-cp311-win32.whl", hash = "sha256:35110e80070d664781ec7955c7de557456b25727a0257b354830abb759bf8311"},
- {file = "shapely-2.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c6b78c0007a34ce7144f98b7418800e0a6a5d9a762f2244b00ea560525290c9"},
- {file = "shapely-2.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:03bd7b5fa5deb44795cc0a503999d10ae9d8a22df54ae8d4a4cd2e8a93466195"},
- {file = "shapely-2.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ff9521991ed9e201c2e923da014e766c1aa04771bc93e6fe97c27dcf0d40ace"},
- {file = "shapely-2.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b65365cfbf657604e50d15161ffcc68de5cdb22a601bbf7823540ab4918a98d"},
- {file = "shapely-2.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21f64e647a025b61b19585d2247137b3a38a35314ea68c66aaf507a1c03ef6fe"},
- {file = "shapely-2.0.5-cp312-cp312-win32.whl", hash = "sha256:3ac7dc1350700c139c956b03d9c3df49a5b34aaf91d024d1510a09717ea39199"},
- {file = "shapely-2.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:30e8737983c9d954cd17feb49eb169f02f1da49e24e5171122cf2c2b62d65c95"},
- {file = "shapely-2.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ff7731fea5face9ec08a861ed351734a79475631b7540ceb0b66fb9732a5f529"},
- {file = "shapely-2.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ff9e520af0c5a578e174bca3c18713cd47a6c6a15b6cf1f50ac17dc8bb8db6a2"},
- {file = "shapely-2.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49b299b91557b04acb75e9732645428470825061f871a2edc36b9417d66c1fc5"},
- {file = "shapely-2.0.5-cp37-cp37m-win32.whl", hash = "sha256:b5870633f8e684bf6d1ae4df527ddcb6f3895f7b12bced5c13266ac04f47d231"},
- {file = "shapely-2.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:401cb794c5067598f50518e5a997e270cd7642c4992645479b915c503866abed"},
- {file = "shapely-2.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e91ee179af539100eb520281ba5394919067c6b51824e6ab132ad4b3b3e76dd0"},
- {file = "shapely-2.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8af6f7260f809c0862741ad08b1b89cb60c130ae30efab62320bbf4ee9cc71fa"},
- {file = "shapely-2.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5456dd522800306ba3faef77c5ba847ec30a0bd73ab087a25e0acdd4db2514f"},
- {file = "shapely-2.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b714a840402cde66fd7b663bb08cacb7211fa4412ea2a209688f671e0d0631fd"},
- {file = "shapely-2.0.5-cp38-cp38-win32.whl", hash = "sha256:7e8cf5c252fac1ea51b3162be2ec3faddedc82c256a1160fc0e8ddbec81b06d2"},
- {file = "shapely-2.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:4461509afdb15051e73ab178fae79974387f39c47ab635a7330d7fee02c68a3f"},
- {file = "shapely-2.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7545a39c55cad1562be302d74c74586f79e07b592df8ada56b79a209731c0219"},
- {file = "shapely-2.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c83a36f12ec8dee2066946d98d4d841ab6512a6ed7eb742e026a64854019b5f"},
- {file = "shapely-2.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89e640c2cd37378480caf2eeda9a51be64201f01f786d127e78eaeff091ec897"},
- {file = "shapely-2.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06efe39beafde3a18a21dde169d32f315c57da962826a6d7d22630025200c5e6"},
- {file = "shapely-2.0.5-cp39-cp39-win32.whl", hash = "sha256:8203a8b2d44dcb366becbc8c3d553670320e4acf0616c39e218c9561dd738d92"},
- {file = "shapely-2.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:7fed9dbfbcfec2682d9a047b9699db8dcc890dfca857ecba872c42185fc9e64e"},
- {file = "shapely-2.0.5.tar.gz", hash = "sha256:bff2366bc786bfa6cb353d6b47d0443c570c32776612e527ee47b6df63fcfe32"},
+ {file = "shapely-2.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29a34e068da2d321e926b5073539fd2a1d4429a2c656bd63f0bd4c8f5b236d0b"},
+ {file = "shapely-2.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c84c3f53144febf6af909d6b581bc05e8785d57e27f35ebaa5c1ab9baba13b"},
+ {file = "shapely-2.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad2fae12dca8d2b727fa12b007e46fbc522148a584f5d6546c539f3464dccde"},
+ {file = "shapely-2.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3304883bd82d44be1b27a9d17f1167fda8c7f5a02a897958d86c59ec69b705e"},
+ {file = "shapely-2.0.6-cp310-cp310-win32.whl", hash = "sha256:3ec3a0eab496b5e04633a39fa3d5eb5454628228201fb24903d38174ee34565e"},
+ {file = "shapely-2.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:28f87cdf5308a514763a5c38de295544cb27429cfa655d50ed8431a4796090c4"},
+ {file = "shapely-2.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5aeb0f51a9db176da9a30cb2f4329b6fbd1e26d359012bb0ac3d3c7781667a9e"},
+ {file = "shapely-2.0.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9a7a78b0d51257a367ee115f4d41ca4d46edbd0dd280f697a8092dd3989867b2"},
+ {file = "shapely-2.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f32c23d2f43d54029f986479f7c1f6e09c6b3a19353a3833c2ffb226fb63a855"},
+ {file = "shapely-2.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dc9fb0eb56498912025f5eb352b5126f04801ed0e8bdbd867d21bdbfd7cbd0"},
+ {file = "shapely-2.0.6-cp311-cp311-win32.whl", hash = "sha256:d93b7e0e71c9f095e09454bf18dad5ea716fb6ced5df3cb044564a00723f339d"},
+ {file = "shapely-2.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:c02eb6bf4cfb9fe6568502e85bb2647921ee49171bcd2d4116c7b3109724ef9b"},
+ {file = "shapely-2.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cec9193519940e9d1b86a3b4f5af9eb6910197d24af02f247afbfb47bcb3fab0"},
+ {file = "shapely-2.0.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83b94a44ab04a90e88be69e7ddcc6f332da7c0a0ebb1156e1c4f568bbec983c3"},
+ {file = "shapely-2.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:537c4b2716d22c92036d00b34aac9d3775e3691f80c7aa517c2c290351f42cd8"},
+ {file = "shapely-2.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fea108334be345c283ce74bf064fa00cfdd718048a8af7343c59eb40f59726"},
+ {file = "shapely-2.0.6-cp312-cp312-win32.whl", hash = "sha256:42fd4cd4834747e4990227e4cbafb02242c0cffe9ce7ef9971f53ac52d80d55f"},
+ {file = "shapely-2.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:665990c84aece05efb68a21b3523a6b2057e84a1afbef426ad287f0796ef8a48"},
+ {file = "shapely-2.0.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:42805ef90783ce689a4dde2b6b2f261e2c52609226a0438d882e3ced40bb3013"},
+ {file = "shapely-2.0.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6d2cb146191a47bd0cee8ff5f90b47547b82b6345c0d02dd8b25b88b68af62d7"},
+ {file = "shapely-2.0.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3fdef0a1794a8fe70dc1f514440aa34426cc0ae98d9a1027fb299d45741c381"},
+ {file = "shapely-2.0.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c665a0301c645615a107ff7f52adafa2153beab51daf34587170d85e8ba6805"},
+ {file = "shapely-2.0.6-cp313-cp313-win32.whl", hash = "sha256:0334bd51828f68cd54b87d80b3e7cee93f249d82ae55a0faf3ea21c9be7b323a"},
+ {file = "shapely-2.0.6-cp313-cp313-win_amd64.whl", hash = "sha256:d37d070da9e0e0f0a530a621e17c0b8c3c9d04105655132a87cfff8bd77cc4c2"},
+ {file = "shapely-2.0.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fa7468e4f5b92049c0f36d63c3e309f85f2775752e076378e36c6387245c5462"},
+ {file = "shapely-2.0.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed5867e598a9e8ac3291da6cc9baa62ca25706eea186117034e8ec0ea4355653"},
+ {file = "shapely-2.0.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81d9dfe155f371f78c8d895a7b7f323bb241fb148d848a2bf2244f79213123fe"},
+ {file = "shapely-2.0.6-cp37-cp37m-win32.whl", hash = "sha256:fbb7bf02a7542dba55129062570211cfb0defa05386409b3e306c39612e7fbcc"},
+ {file = "shapely-2.0.6-cp37-cp37m-win_amd64.whl", hash = "sha256:837d395fac58aa01aa544495b97940995211e3e25f9aaf87bc3ba5b3a8cd1ac7"},
+ {file = "shapely-2.0.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c6d88ade96bf02f6bfd667ddd3626913098e243e419a0325ebef2bbd481d1eb6"},
+ {file = "shapely-2.0.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8b3b818c4407eaa0b4cb376fd2305e20ff6df757bf1356651589eadc14aab41b"},
+ {file = "shapely-2.0.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbc783529a21f2bd50c79cef90761f72d41c45622b3e57acf78d984c50a5d13"},
+ {file = "shapely-2.0.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2423f6c0903ebe5df6d32e0066b3d94029aab18425ad4b07bf98c3972a6e25a1"},
+ {file = "shapely-2.0.6-cp38-cp38-win32.whl", hash = "sha256:2de00c3bfa80d6750832bde1d9487e302a6dd21d90cb2f210515cefdb616e5f5"},
+ {file = "shapely-2.0.6-cp38-cp38-win_amd64.whl", hash = "sha256:3a82d58a1134d5e975f19268710e53bddd9c473743356c90d97ce04b73e101ee"},
+ {file = "shapely-2.0.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:392f66f458a0a2c706254f473290418236e52aa4c9b476a072539d63a2460595"},
+ {file = "shapely-2.0.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eba5bae271d523c938274c61658ebc34de6c4b33fdf43ef7e938b5776388c1be"},
+ {file = "shapely-2.0.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7060566bc4888b0c8ed14b5d57df8a0ead5c28f9b69fb6bed4476df31c51b0af"},
+ {file = "shapely-2.0.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b02154b3e9d076a29a8513dffcb80f047a5ea63c897c0cd3d3679f29363cf7e5"},
+ {file = "shapely-2.0.6-cp39-cp39-win32.whl", hash = "sha256:44246d30124a4f1a638a7d5419149959532b99dfa25b54393512e6acc9c211ac"},
+ {file = "shapely-2.0.6-cp39-cp39-win_amd64.whl", hash = "sha256:2b542d7f1dbb89192d3512c52b679c822ba916f93479fa5d4fc2fe4fa0b3c9e8"},
+ {file = "shapely-2.0.6.tar.gz", hash = "sha256:997f6159b1484059ec239cacaa53467fd8b5564dabe186cd84ac2944663b0bf6"},
]
[package.dependencies]
@@ -3969,26 +6591,101 @@ files = [
{file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"},
]
+[[package]]
+name = "snakeviz"
+version = "2.2.0"
+description = "A web-based viewer for Python profiler output"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "snakeviz-2.2.0-py2.py3-none-any.whl", hash = "sha256:569e2d71c47f80a886aa6e70d6405cb6d30aa3520969ad956b06f824c5f02b8e"},
+ {file = "snakeviz-2.2.0.tar.gz", hash = "sha256:7bfd00be7ae147eb4a170a471578e1cd3f41f803238958b6b8efcf2c698a6aa9"},
+]
+
+[package.dependencies]
+tornado = ">=2.0"
+
+[[package]]
+name = "sniffio"
+version = "1.3.1"
+description = "Sniff out which async library your code is running under"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
+ {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
+]
+
[[package]]
name = "soupsieve"
-version = "2.5"
+version = "2.6"
description = "A modern CSS selector implementation for Beautiful Soup."
optional = false
python-versions = ">=3.8"
files = [
- {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"},
- {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"},
+ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"},
+ {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"},
+]
+
+[[package]]
+name = "speechrecognition"
+version = "3.10.4"
+description = "Library for performing speech recognition, with support for several engines and APIs, online and offline."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "SpeechRecognition-3.10.4-py2.py3-none-any.whl", hash = "sha256:723b8155692a8ed11a30013f15f89a3e57c5dc8bc73c8cb024bf9bd14c21fba5"},
+ {file = "speechrecognition-3.10.4.tar.gz", hash = "sha256:986bafcf61f14625c2f3cea6a471838edd379ed68aeed7b8f3c0fb41e21f1125"},
+]
+
+[package.dependencies]
+requests = ">=2.26.0"
+typing-extensions = "*"
+
+[package.extras]
+dev = ["flake8", "rstcheck"]
+whisper-api = ["openai"]
+whisper-local = ["openai-whisper", "soundfile"]
+
+[[package]]
+name = "spidev"
+version = "3.6"
+description = "Python bindings for Linux SPI access through spidev"
+optional = true
+python-versions = "*"
+files = [
+ {file = "spidev-3.6-cp39-cp39-linux_armv7l.whl", hash = "sha256:280abc00a1ef7780ef62c3f294f52a2527b6c47d8c269fea98664970bcaf6da5"},
+ {file = "spidev-3.6.tar.gz", hash = "sha256:14dbc37594a4aaef85403ab617985d3c3ef464d62bc9b769ef552db53701115b"},
+]
+
+[[package]]
+name = "stack-data"
+version = "0.6.3"
+description = "Extract data from python stack frames and tracebacks for informative displays"
+optional = true
+python-versions = "*"
+files = [
+ {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"},
+ {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"},
]
+[package.dependencies]
+asttokens = ">=2.1.0"
+executing = ">=1.2.0"
+pure-eval = "*"
+
+[package.extras]
+tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"]
+
[[package]]
name = "sympy"
-version = "1.13.0"
+version = "1.13.3"
description = "Computer algebra system (CAS) in Python"
optional = false
python-versions = ">=3.8"
files = [
- {file = "sympy-1.13.0-py3-none-any.whl", hash = "sha256:6b0b32a4673fb91bd3cac3b55406c8e01d53ae22780be467301cc452f6680c92"},
- {file = "sympy-1.13.0.tar.gz", hash = "sha256:3b6af8f4d008b9a1a6a4268b335b984b23835f26d1d60b0526ebc71d48a25f57"},
+ {file = "sympy-1.13.3-py3-none-any.whl", hash = "sha256:54612cf55a62755ee71824ce692986f23c88ffa77207b30c1368eda4a7060f73"},
+ {file = "sympy-1.13.3.tar.gz", hash = "sha256:b27fd2c6530e0ab39e275fc9b683895367e51d5da91baa8d3d64db2565fec4d9"},
]
[package.dependencies]
@@ -3998,41 +6695,78 @@ mpmath = ">=1.1.0,<1.4"
dev = ["hypothesis (>=6.70.0)", "pytest (>=7.1.0)"]
[[package]]
-name = "tbb"
-version = "2021.13.0"
-description = "Intel® oneAPI Threading Building Blocks (oneTBB)"
-optional = false
-python-versions = "*"
+name = "tabulate"
+version = "0.9.0"
+description = "Pretty-print tabular data"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"},
+ {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"},
+]
+
+[package.extras]
+widechars = ["wcwidth"]
+
+[[package]]
+name = "tenacity"
+version = "9.0.0"
+description = "Retry code until it succeeds"
+optional = true
+python-versions = ">=3.8"
files = [
- {file = "tbb-2021.13.0-py2.py3-none-manylinux1_i686.whl", hash = "sha256:a2567725329639519d46d92a2634cf61e76601dac2f777a05686fea546c4fe4f"},
- {file = "tbb-2021.13.0-py2.py3-none-manylinux1_x86_64.whl", hash = "sha256:aaf667e92849adb012b8874d6393282afc318aca4407fc62f912ee30a22da46a"},
- {file = "tbb-2021.13.0-py3-none-win32.whl", hash = "sha256:6669d26703e9943f6164c6407bd4a237a45007e79b8d3832fe6999576eaaa9ef"},
- {file = "tbb-2021.13.0-py3-none-win_amd64.whl", hash = "sha256:3528a53e4bbe64b07a6112b4c5a00ff3c61924ee46c9c68e004a1ac7ad1f09c3"},
+ {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"},
+ {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"},
]
+[package.extras]
+doc = ["reno", "sphinx"]
+test = ["pytest", "tornado (>=4.5)", "typeguard"]
+
[[package]]
name = "termcolor"
-version = "2.4.0"
+version = "2.5.0"
description = "ANSI color formatting for output in terminal"
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
files = [
- {file = "termcolor-2.4.0-py3-none-any.whl", hash = "sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63"},
- {file = "termcolor-2.4.0.tar.gz", hash = "sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a"},
+ {file = "termcolor-2.5.0-py3-none-any.whl", hash = "sha256:37b17b5fc1e604945c2642c872a3764b5d547a48009871aea3edd3afa180afb8"},
+ {file = "termcolor-2.5.0.tar.gz", hash = "sha256:998d8d27da6d48442e8e1f016119076b690d962507531df4890fcd2db2ef8a6f"},
]
[package.extras]
tests = ["pytest", "pytest-cov"]
+[[package]]
+name = "terminado"
+version = "0.18.1"
+description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "terminado-0.18.1-py3-none-any.whl", hash = "sha256:a4468e1b37bb318f8a86514f65814e1afc977cf29b3992a4500d9dd305dcceb0"},
+ {file = "terminado-0.18.1.tar.gz", hash = "sha256:de09f2c4b85de4765f7714688fff57d3e75bad1f909b589fde880460c753fd2e"},
+]
+
+[package.dependencies]
+ptyprocess = {version = "*", markers = "os_name != \"nt\""}
+pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""}
+tornado = ">=6.1.0"
+
+[package.extras]
+docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
+test = ["pre-commit", "pytest (>=7.0)", "pytest-timeout"]
+typing = ["mypy (>=1.6,<2.0)", "traitlets (>=5.11.1)"]
+
[[package]]
name = "tifffile"
-version = "2024.7.2"
+version = "2024.9.20"
description = "Read and write TIFF files"
optional = true
-python-versions = ">=3.9"
+python-versions = ">=3.10"
files = [
- {file = "tifffile-2024.7.2-py3-none-any.whl", hash = "sha256:5a2ee608c9cc1f2e044d943dacebddc71d4827b6fad150ef4c644b7aefbe2d1a"},
- {file = "tifffile-2024.7.2.tar.gz", hash = "sha256:02e52e8872c0e9943add686d2fd8bcfb18f0a824760882cf5e35fcbc2c80e32c"},
+ {file = "tifffile-2024.9.20-py3-none-any.whl", hash = "sha256:c54dc85bc1065d972cb8a6ffb3181389d597876aa80177933459733e4ed243dd"},
+ {file = "tifffile-2024.9.20.tar.gz", hash = "sha256:3fbf3be2f995a7051a8ae05a4be70c96fc0789f22ed6f1c4104c973cf68a640b"},
]
[package.dependencies]
@@ -4040,118 +6774,162 @@ numpy = "*"
[package.extras]
all = ["defusedxml", "fsspec", "imagecodecs (>=2023.8.12)", "lxml", "matplotlib", "zarr"]
+codecs = ["imagecodecs (>=2023.8.12)"]
+plot = ["matplotlib"]
+test = ["cmapfile", "czifile", "dask", "defusedxml", "fsspec", "imagecodecs", "lfdfiles", "lxml", "ndtiff", "oiffile", "psdtags", "pytest", "roifile", "xarray", "zarr"]
+xml = ["defusedxml", "lxml"]
+zarr = ["fsspec", "zarr"]
+
+[[package]]
+name = "tinycss2"
+version = "1.3.0"
+description = "A tiny CSS parser"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "tinycss2-1.3.0-py3-none-any.whl", hash = "sha256:54a8dbdffb334d536851be0226030e9505965bb2f30f21a4a82c55fb2a80fae7"},
+ {file = "tinycss2-1.3.0.tar.gz", hash = "sha256:152f9acabd296a8375fbca5b84c961ff95971fcfc32e79550c8df8e29118c54d"},
+]
+
+[package.dependencies]
+webencodings = ">=0.4"
+
+[package.extras]
+doc = ["sphinx", "sphinx_rtd_theme"]
+test = ["pytest", "ruff"]
[[package]]
name = "tomli"
-version = "2.0.1"
+version = "2.0.2"
description = "A lil' TOML parser"
optional = true
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
- {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+ {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"},
+ {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"},
]
[[package]]
name = "torch"
-version = "2.3.1"
+version = "2.4.1"
description = "Tensors and Dynamic neural networks in Python with strong GPU acceleration"
optional = false
python-versions = ">=3.8.0"
files = [
- {file = "torch-2.3.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:605a25b23944be5ab7c3467e843580e1d888b8066e5aaf17ff7bf9cc30001cc3"},
- {file = "torch-2.3.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f2357eb0965583a0954d6f9ad005bba0091f956aef879822274b1bcdb11bd308"},
- {file = "torch-2.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:32b05fe0d1ada7f69c9f86c14ff69b0ef1957a5a54199bacba63d22d8fab720b"},
- {file = "torch-2.3.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:7c09a94362778428484bcf995f6004b04952106aee0ef45ff0b4bab484f5498d"},
- {file = "torch-2.3.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:b2ec81b61bb094ea4a9dee1cd3f7b76a44555375719ad29f05c0ca8ef596ad39"},
- {file = "torch-2.3.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:490cc3d917d1fe0bd027057dfe9941dc1d6d8e3cae76140f5dd9a7e5bc7130ab"},
- {file = "torch-2.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:5802530783bd465fe66c2df99123c9a54be06da118fbd785a25ab0a88123758a"},
- {file = "torch-2.3.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:a7dd4ed388ad1f3d502bf09453d5fe596c7b121de7e0cfaca1e2017782e9bbac"},
- {file = "torch-2.3.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:a486c0b1976a118805fc7c9641d02df7afbb0c21e6b555d3bb985c9f9601b61a"},
- {file = "torch-2.3.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:224259821fe3e4c6f7edf1528e4fe4ac779c77addaa74215eb0b63a5c474d66c"},
- {file = "torch-2.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:e5fdccbf6f1334b2203a61a0e03821d5845f1421defe311dabeae2fc8fbeac2d"},
- {file = "torch-2.3.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:3c333dc2ebc189561514eda06e81df22bf8fb64e2384746b2cb9f04f96d1d4c8"},
- {file = "torch-2.3.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:07e9ba746832b8d069cacb45f312cadd8ad02b81ea527ec9766c0e7404bb3feb"},
- {file = "torch-2.3.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:462d1c07dbf6bb5d9d2f3316fee73a24f3d12cd8dacf681ad46ef6418f7f6626"},
- {file = "torch-2.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff60bf7ce3de1d43ad3f6969983f321a31f0a45df3690921720bcad6a8596cc4"},
- {file = "torch-2.3.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:bee0bd33dc58aa8fc8a7527876e9b9a0e812ad08122054a5bff2ce5abf005b10"},
- {file = "torch-2.3.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:aaa872abde9a3d4f91580f6396d54888620f4a0b92e3976a6034759df4b961ad"},
- {file = "torch-2.3.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:3d7a7f7ef21a7520510553dc3938b0c57c116a7daee20736a9e25cbc0e832bdc"},
- {file = "torch-2.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:4777f6cefa0c2b5fa87223c213e7b6f417cf254a45e5829be4ccd1b2a4ee1011"},
- {file = "torch-2.3.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:2bb5af780c55be68fe100feb0528d2edebace1d55cb2e351de735809ba7391eb"},
+ {file = "torch-2.4.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:362f82e23a4cd46341daabb76fba08f04cd646df9bfaf5da50af97cb60ca4971"},
+ {file = "torch-2.4.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:e8ac1985c3ff0f60d85b991954cfc2cc25f79c84545aead422763148ed2759e3"},
+ {file = "torch-2.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:91e326e2ccfb1496e3bee58f70ef605aeb27bd26be07ba64f37dcaac3d070ada"},
+ {file = "torch-2.4.1-cp310-none-macosx_11_0_arm64.whl", hash = "sha256:d36a8ef100f5bff3e9c3cea934b9e0d7ea277cb8210c7152d34a9a6c5830eadd"},
+ {file = "torch-2.4.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:0b5f88afdfa05a335d80351e3cea57d38e578c8689f751d35e0ff36bce872113"},
+ {file = "torch-2.4.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:ef503165f2341942bfdf2bd520152f19540d0c0e34961232f134dc59ad435be8"},
+ {file = "torch-2.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:092e7c2280c860eff762ac08c4bdcd53d701677851670695e0c22d6d345b269c"},
+ {file = "torch-2.4.1-cp311-none-macosx_11_0_arm64.whl", hash = "sha256:ddddbd8b066e743934a4200b3d54267a46db02106876d21cf31f7da7a96f98ea"},
+ {file = "torch-2.4.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:fdc4fe11db3eb93c1115d3e973a27ac7c1a8318af8934ffa36b0370efe28e042"},
+ {file = "torch-2.4.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:18835374f599207a9e82c262153c20ddf42ea49bc76b6eadad8e5f49729f6e4d"},
+ {file = "torch-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:ebea70ff30544fc021d441ce6b219a88b67524f01170b1c538d7d3ebb5e7f56c"},
+ {file = "torch-2.4.1-cp312-none-macosx_11_0_arm64.whl", hash = "sha256:72b484d5b6cec1a735bf3fa5a1c4883d01748698c5e9cfdbeb4ffab7c7987e0d"},
+ {file = "torch-2.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:c99e1db4bf0c5347107845d715b4aa1097e601bdc36343d758963055e9599d93"},
+ {file = "torch-2.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b57f07e92858db78c5b72857b4f0b33a65b00dc5d68e7948a8494b0314efb880"},
+ {file = "torch-2.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:f18197f3f7c15cde2115892b64f17c80dbf01ed72b008020e7da339902742cf6"},
+ {file = "torch-2.4.1-cp38-none-macosx_11_0_arm64.whl", hash = "sha256:5fc1d4d7ed265ef853579caf272686d1ed87cebdcd04f2a498f800ffc53dab71"},
+ {file = "torch-2.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:40f6d3fe3bae74efcf08cb7f8295eaddd8a838ce89e9d26929d4edd6d5e4329d"},
+ {file = "torch-2.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c9299c16c9743001ecef515536ac45900247f4338ecdf70746f2461f9e4831db"},
+ {file = "torch-2.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:6bce130f2cd2d52ba4e2c6ada461808de7e5eccbac692525337cfb4c19421846"},
+ {file = "torch-2.4.1-cp39-none-macosx_11_0_arm64.whl", hash = "sha256:a38de2803ee6050309aac032676536c3d3b6a9804248537e38e098d0e14817ec"},
]
[package.dependencies]
filelock = "*"
fsspec = "*"
jinja2 = "*"
-mkl = {version = ">=2021.1.1,<=2021.4.0", markers = "platform_system == \"Windows\""}
networkx = "*"
nvidia-cublas-cu12 = {version = "12.1.3.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-cuda-cupti-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-cuda-nvrtc-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-cuda-runtime-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
-nvidia-cudnn-cu12 = {version = "8.9.2.26", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+nvidia-cudnn-cu12 = {version = "9.1.0.70", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-cufft-cu12 = {version = "11.0.2.54", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-curand-cu12 = {version = "10.3.2.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-cusolver-cu12 = {version = "11.4.5.107", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-cusparse-cu12 = {version = "12.1.0.106", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-nccl-cu12 = {version = "2.20.5", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
nvidia-nvtx-cu12 = {version = "12.1.105", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\""}
+setuptools = "*"
sympy = "*"
-triton = {version = "2.3.1", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.12\""}
+triton = {version = "3.0.0", markers = "platform_system == \"Linux\" and platform_machine == \"x86_64\" and python_version < \"3.13\""}
typing-extensions = ">=4.8.0"
[package.extras]
opt-einsum = ["opt-einsum (>=3.3)"]
-optree = ["optree (>=0.9.1)"]
+optree = ["optree (>=0.11.0)"]
[[package]]
name = "torchvision"
-version = "0.18.1"
+version = "0.19.1"
description = "image and video datasets and models for torch deep learning"
optional = false
python-versions = ">=3.8"
files = [
- {file = "torchvision-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3e694e54b0548dad99c12af6bf0c8e4f3350137d391dcd19af22a1c5f89322b3"},
- {file = "torchvision-0.18.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:0b3bda0aa5b416eeb547143b8eeaf17720bdba9cf516dc991aacb81811aa96a5"},
- {file = "torchvision-0.18.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:573ff523c739405edb085f65cb592f482d28a30e29b0be4c4ba08040b3ae785f"},
- {file = "torchvision-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:ef7bbbc60b38e831a75e547c66ca1784f2ac27100f9e4ddbe9614cef6cbcd942"},
- {file = "torchvision-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:80b5d794dd0fdba787adc22f1a367a5ead452327686473cb260dd94364bc56a6"},
- {file = "torchvision-0.18.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:9077cf590cdb3a5e8fdf5cdb71797f8c67713f974cf0228ecb17fcd670ab42f9"},
- {file = "torchvision-0.18.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:ceb993a882f1ae7ae373ed39c28d7e3e802205b0e59a7ed84ef4028f0bba8d7f"},
- {file = "torchvision-0.18.1-cp311-cp311-win_amd64.whl", hash = "sha256:52f7436140045dc2239cdc502aa76b2bd8bd676d64244ff154d304aa69852046"},
- {file = "torchvision-0.18.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2be6f0bf7c455c89a51a1dbb6f668d36c6edc479f49ac912d745d10df5715657"},
- {file = "torchvision-0.18.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:f118d887bfde3a948a41d56587525401e5cac1b7db2eaca203324d6ed2b1caca"},
- {file = "torchvision-0.18.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:13d24d904f65e62d66a1e0c41faec630bc193867b8a4a01166769e8a8e8df8e9"},
- {file = "torchvision-0.18.1-cp312-cp312-win_amd64.whl", hash = "sha256:ed6340b69a63a625e512a66127210d412551d9c5f2ad2978130c6a45bf56cd4a"},
- {file = "torchvision-0.18.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b1c3864fa9378c88bce8ad0ef3599f4f25397897ce612e1c245c74b97092f35e"},
- {file = "torchvision-0.18.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:02085a2ffc7461f5c0edb07d6f3455ee1806561f37736b903da820067eea58c7"},
- {file = "torchvision-0.18.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:9726c316a2501df8503e5a5dc46a631afd4c515a958972e5b7f7b9c87d2125c0"},
- {file = "torchvision-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:64a2662dbf30db9055d8b201d6e56f312a504e5ccd9d144c57c41622d3c524cb"},
- {file = "torchvision-0.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:975b8594c0f5288875408acbb74946eea786c5b008d129c0d045d0ead23742bc"},
- {file = "torchvision-0.18.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:da83c8bbd34d8bee48bfa1d1b40e0844bc3cba10ed825a5a8cbe3ce7b62264cd"},
- {file = "torchvision-0.18.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:54bfcd352abb396d5c9c237d200167c178bd136051b138e1e8ef46ce367c2773"},
- {file = "torchvision-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:5c8366a1aeee49e9ea9e64b30d199debdf06b1bd7610a76165eb5d7869c3bde5"},
+ {file = "torchvision-0.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:54e8513099e6f586356c70f809d34f391af71ad182fe071cc328a28af2c40608"},
+ {file = "torchvision-0.19.1-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:20a1f5e02bfdad7714e55fa3fa698347c11d829fa65e11e5a84df07d93350eed"},
+ {file = "torchvision-0.19.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:7b063116164be52fc6deb4762de7f8c90bfa3a65f8d5caf17f8e2d5aadc75a04"},
+ {file = "torchvision-0.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:f40b6acabfa886da1bc3768f47679c61feee6bde90deb979d9f300df8c8a0145"},
+ {file = "torchvision-0.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:40514282b4896d62765b8e26d7091c32e17c35817d00ec4be2362ea3ba3d1787"},
+ {file = "torchvision-0.19.1-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:5a91be061ae5d6d5b95e833b93e57ca4d3c56c5a57444dd15da2e3e7fba96050"},
+ {file = "torchvision-0.19.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d71a6a6fe3a5281ca3487d4c56ad4aad20ff70f82f1d7c79bcb6e7b0c2af00c8"},
+ {file = "torchvision-0.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:70dea324174f5e9981b68e4b7cd524512c106ba64aedef560a86a0bbf2fbf62c"},
+ {file = "torchvision-0.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27ece277ff0f6cdc7fed0627279c632dcb2e58187da771eca24b0fbcf3f8590d"},
+ {file = "torchvision-0.19.1-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:c659ff92a61f188a1a7baef2850f3c0b6c85685447453c03d0e645ba8f1dcc1c"},
+ {file = "torchvision-0.19.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:c07bf43c2a145d792ecd9d0503d6c73577147ece508d45600d8aac77e4cdfcf9"},
+ {file = "torchvision-0.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b4283d283675556bb0eae31d29996f53861b17cbdcdf3509e6bc050414ac9289"},
+ {file = "torchvision-0.19.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c4e4f5b24ea6b087b02ed492ab1e21bba3352c4577e2def14248cfc60732338"},
+ {file = "torchvision-0.19.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:9281d63ead929bb19143731154cd1d8bf0b5e9873dff8578a40e90a6bec3c6fa"},
+ {file = "torchvision-0.19.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:4d10bc9083c4d5fadd7edd7b729700a7be48dab4f62278df3bc73fa48e48a155"},
+ {file = "torchvision-0.19.1-cp38-cp38-win_amd64.whl", hash = "sha256:ccf085ef1824fb9e16f1901285bf89c298c62dfd93267a39e8ee42c71255242f"},
+ {file = "torchvision-0.19.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:731f434d91586769e255b5d70ed1a4457e0a1394a95f4aacf0e1e7e21f80c098"},
+ {file = "torchvision-0.19.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:febe4f14d4afcb47cc861d8be7760ab6a123cd0817f97faf5771488cb6aa90f4"},
+ {file = "torchvision-0.19.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:e328309b8670a2e889b2fe76a1c2744a099c11c984da9a822357bd9debd699a5"},
+ {file = "torchvision-0.19.1-cp39-cp39-win_amd64.whl", hash = "sha256:6616f12e00a22e7f3fedbd0fccb0804c05e8fe22871668f10eae65cf3f283614"},
]
[package.dependencies]
numpy = "*"
pillow = ">=5.3.0,<8.3.dev0 || >=8.4.dev0"
-torch = "2.3.1"
+torch = "2.4.1"
[package.extras]
+gdown = ["gdown (>=4.7.3)"]
scipy = ["scipy"]
+[[package]]
+name = "tornado"
+version = "6.4.1"
+description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"},
+ {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"},
+ {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"},
+ {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"},
+ {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"},
+ {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"},
+ {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"},
+ {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"},
+ {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"},
+ {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"},
+ {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"},
+]
+
[[package]]
name = "tqdm"
-version = "4.66.4"
+version = "4.66.5"
description = "Fast, Extensible Progress Meter"
optional = false
python-versions = ">=3.7"
files = [
- {file = "tqdm-4.66.4-py3-none-any.whl", hash = "sha256:b75ca56b413b030bc3f00af51fd2c1a1a5eac6a0c1cca83cbb37a5c52abce644"},
- {file = "tqdm-4.66.4.tar.gz", hash = "sha256:e4d936c9de8727928f3be6079590e97d9abfe8d39a590be678eb5919ffc186bb"},
+ {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"},
+ {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"},
]
[package.dependencies]
@@ -4163,19 +6941,68 @@ notebook = ["ipywidgets (>=6)"]
slack = ["slack-sdk"]
telegram = ["requests"]
+[[package]]
+name = "traitlets"
+version = "5.14.3"
+description = "Traitlets Python configuration system"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"},
+ {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"},
+]
+
+[package.extras]
+docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"]
+test = ["argcomplete (>=3.0.3)", "mypy (>=1.7.0)", "pre-commit", "pytest (>=7.0,<8.2)", "pytest-mock", "pytest-mypy-testing"]
+
+[[package]]
+name = "transforms3d"
+version = "0.4.2"
+description = "Functions for 3D coordinate transformations"
+optional = true
+python-versions = ">=3.6"
+files = [
+ {file = "transforms3d-0.4.2-py3-none-any.whl", hash = "sha256:1c70399d9e9473ecc23311fd947f727f7c69ed0b063244828c383aa1aefa5941"},
+ {file = "transforms3d-0.4.2.tar.gz", hash = "sha256:e8b5df30eaedbee556e81c6938e55aab5365894e47d0a17615d7db7fd2393680"},
+]
+
+[package.dependencies]
+numpy = ">=1.15"
+
+[[package]]
+name = "trimesh"
+version = "4.4.7"
+description = "Import, export, process, analyze and view triangular meshes."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "trimesh-4.4.7-py3-none-any.whl", hash = "sha256:6df98f3f5b971945b416f567b7ff6ee0c51b70f01b80a16a990fdcceb8dbd114"},
+ {file = "trimesh-4.4.7.tar.gz", hash = "sha256:e6619c70c99006d41f175bd5e1ba2c8c3dfdb00c2b41d65059917942e2f6971a"},
+]
+
+[package.dependencies]
+numpy = ">=1.20"
+
+[package.extras]
+all = ["trimesh[deprecated,easy,recommend,test]"]
+deprecated = ["gmsh (==4.12.2)"]
+easy = ["chardet", "colorlog", "embreex", "httpx", "jsonschema", "lxml", "manifold3d (>=2.3.0)", "mapbox-earcut (>=1.0.2)", "networkx", "pillow", "pycollada", "rtree", "scipy", "setuptools", "shapely", "svg.path", "vhacdx", "xatlas", "xxhash"]
+recommend = ["cascadio", "glooey", "meshio", "openctm", "psutil", "pyglet (<2)", "python-fcl", "scikit-image", "sympy"]
+test = ["coveralls", "ezdxf", "matplotlib", "pyinstrument", "pymeshlab", "pyright", "pytest", "pytest-beartype", "pytest-cov", "ruff"]
+
[[package]]
name = "triton"
-version = "2.3.1"
+version = "3.0.0"
description = "A language and compiler for custom Deep Learning operations"
optional = false
python-versions = "*"
files = [
- {file = "triton-2.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c84595cbe5e546b1b290d2a58b1494df5a2ef066dd890655e5b8a8a92205c33"},
- {file = "triton-2.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9d64ae33bcb3a7a18081e3a746e8cf87ca8623ca13d2c362413ce7a486f893e"},
- {file = "triton-2.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaf80e8761a9e3498aa92e7bf83a085b31959c61f5e8ac14eedd018df6fccd10"},
- {file = "triton-2.3.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b13bf35a2b659af7159bf78e92798dc62d877aa991de723937329e2d382f1991"},
- {file = "triton-2.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63381e35ded3304704ea867ffde3b7cfc42c16a55b3062d41e017ef510433d66"},
- {file = "triton-2.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d968264523c7a07911c8fb51b4e0d1b920204dae71491b1fe7b01b62a31e124"},
+ {file = "triton-3.0.0-1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e1efef76935b2febc365bfadf74bcb65a6f959a9872e5bddf44cc9e0adce1e1a"},
+ {file = "triton-3.0.0-1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ce8520437c602fb633f1324cc3871c47bee3b67acf9756c1a66309b60e3216c"},
+ {file = "triton-3.0.0-1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:34e509deb77f1c067d8640725ef00c5cbfcb2052a1a3cb6a6d343841f92624eb"},
+ {file = "triton-3.0.0-1-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bcbf3b1c48af6a28011a5c40a5b3b9b5330530c3827716b5fbf6d7adcc1e53e9"},
+ {file = "triton-3.0.0-1-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:6e5727202f7078c56f91ff13ad0c1abab14a0e7f2c87e91b12b6f64f3e8ae609"},
]
[package.dependencies]
@@ -4183,8 +7010,19 @@ filelock = "*"
[package.extras]
build = ["cmake (>=3.20)", "lit"]
-tests = ["autopep8", "flake8", "isort", "numpy", "pytest", "scipy (>=1.7.1)", "torch"]
-tutorials = ["matplotlib", "pandas", "tabulate", "torch"]
+tests = ["autopep8", "flake8", "isort", "llnl-hatchet", "numpy", "pytest", "scipy (>=1.7.1)"]
+tutorials = ["matplotlib", "pandas", "tabulate"]
+
+[[package]]
+name = "types-python-dateutil"
+version = "2.9.0.20241003"
+description = "Typing stubs for python-dateutil"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "types-python-dateutil-2.9.0.20241003.tar.gz", hash = "sha256:58cb85449b2a56d6684e41aeefb4c4280631246a0da1a719bdbe6f3fb0317446"},
+ {file = "types_python_dateutil-2.9.0.20241003-py3-none-any.whl", hash = "sha256:250e1d8e80e7bbc3a6c99b907762711d1a1cdd00e978ad39cb5940f6f0a87f3d"},
+]
[[package]]
name = "typing-extensions"
@@ -4199,24 +7037,78 @@ files = [
[[package]]
name = "tzdata"
-version = "2024.1"
+version = "2024.2"
description = "Provider of IANA time zone data"
optional = false
python-versions = ">=2"
files = [
- {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
- {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
+ {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"},
+ {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"},
+]
+
+[[package]]
+name = "urchin"
+version = "0.0.27"
+description = "URDF parser and manipulator for Python"
+optional = true
+python-versions = "*"
+files = [
+ {file = "urchin-0.0.27-py3-none-any.whl", hash = "sha256:e4cf43c8f52a44e0075e1778b76c203922085dd1fb9340cd703bf54188208611"},
+ {file = "urchin-0.0.27.tar.gz", hash = "sha256:bda308ed7d2b80eb1e097dc3963fabe9e00a6cbd89a1f6be6f063c2a065d3671"},
+]
+
+[package.dependencies]
+lxml = "*"
+networkx = "*"
+numpy = "*"
+pillow = "*"
+pycollada = ">=0.6"
+pyribbit = ">=0.1.46"
+scipy = "*"
+six = "*"
+trimesh = "*"
+
+[package.extras]
+dev = ["flake8", "pre-commit", "pytest", "pytest-cov", "tox"]
+docs = ["sphinx", "sphinx-automodapi", "sphinx-rtd-theme"]
+
+[[package]]
+name = "urdf-parser-py"
+version = "0.0.4"
+description = "This package contains a python parser for the Unified Robot Description Format (URDF), which is an XML format for representing a robot model."
+optional = true
+python-versions = "*"
+files = [
+ {file = "urdf_parser_py-0.0.4.tar.gz", hash = "sha256:e983f637145fded67bcff6a542302069bb975b2edf1b18318c093abba1b794cc"},
+]
+
+[package.dependencies]
+lxml = "*"
+pyyaml = "*"
+
+[[package]]
+name = "uri-template"
+version = "1.3.0"
+description = "RFC 6570 URI Template Processor"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"},
+ {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"},
]
+[package.extras]
+dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"]
+
[[package]]
name = "urllib3"
-version = "2.2.2"
+version = "2.2.3"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.8"
files = [
- {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"},
- {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"},
+ {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"},
+ {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"},
]
[package.extras]
@@ -4227,13 +7119,13 @@ zstd = ["zstandard (>=0.18.0)"]
[[package]]
name = "virtualenv"
-version = "20.26.3"
+version = "20.26.6"
description = "Virtual Python Environment builder"
optional = true
python-versions = ">=3.7"
files = [
- {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"},
- {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"},
+ {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"},
+ {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"},
]
[package.dependencies]
@@ -4247,18 +7139,21 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess
[[package]]
name = "wandb"
-version = "0.17.4"
+version = "0.18.3"
description = "A CLI and library for interacting with the Weights & Biases API."
optional = false
python-versions = ">=3.7"
files = [
- {file = "wandb-0.17.4-py3-none-any.whl", hash = "sha256:807d600a86ee9e2df66b64af738caa6eb3b5e04a30b296b9a7c940ede759f4a7"},
- {file = "wandb-0.17.4-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:02a00fe5e86ac04c07936d1fe7e7d9111d9546a3a844ac00d89735df0290cef8"},
- {file = "wandb-0.17.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:78eb7e6a530848fc47792e95563b406019bed7c4dadb54415a982cd4900b7c75"},
- {file = "wandb-0.17.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91bd3bc5b76f880732c6169288ac64d59a016fd8227d3a74bdc69d457639ef17"},
- {file = "wandb-0.17.4-py3-none-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40aa85e0fc6d875f9597c6c02e968bc35a571ddac1cbfac2223fcdb941bf1214"},
- {file = "wandb-0.17.4-py3-none-win32.whl", hash = "sha256:fec9378ae2ed8c4cf29bb5ef9bd2889f0085b6566d7b9dd7b789718519c46447"},
- {file = "wandb-0.17.4-py3-none-win_amd64.whl", hash = "sha256:94b78c1a74222a0970580cd953c21562bb57af1c2e14a4e48ab07b8fbcb64f08"},
+ {file = "wandb-0.18.3-py3-none-any.whl", hash = "sha256:7da64f7da0ff7572439de10bfd45534e8811e71e78ac2ccc3b818f1c0f3a9aef"},
+ {file = "wandb-0.18.3-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:6674d8a5c40c79065b9c7eb765136756d5ebc9457a5f9abc820a660fb23f8b67"},
+ {file = "wandb-0.18.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:741f566e409a2684d3047e4cc25e8e914d78196b901190937b24b6abb8b052e5"},
+ {file = "wandb-0.18.3-py3-none-macosx_11_0_x86_64.whl", hash = "sha256:8be5e877570b693001c52dcc2089e48e6a4dcbf15f3adf5c9349f95148b59d58"},
+ {file = "wandb-0.18.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d788852bd4739fa18de3918f309c3a955b5cef3247fae1c40df3a63af637e1a0"},
+ {file = "wandb-0.18.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab81424eb207d78239a8d69c90521a70074fb81e3709055484e43c76fe44dc08"},
+ {file = "wandb-0.18.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:2c91315b8b62423eae18577d66a4b4bb8e4341a7d5c849cb2963e3b3dff0bf6d"},
+ {file = "wandb-0.18.3-py3-none-win32.whl", hash = "sha256:92a647dab783938ec87776a9fae8a13e72e6dad939c53e357cdea9d2570f0ad8"},
+ {file = "wandb-0.18.3-py3-none-win_amd64.whl", hash = "sha256:29cac2cfa3124241fed22cfedc9a52e1500275ee9bbb0b428ce4bf63c4723bf0"},
+ {file = "wandb-0.18.3.tar.gz", hash = "sha256:eb2574cea72bc908c6ce1b37edf7a889619e6e06e1b4714eecfe0662ded43c06"},
]
[package.dependencies]
@@ -4266,7 +7161,7 @@ click = ">=7.1,<8.0.0 || >8.0.0"
docker-pycreds = ">=0.4.0"
gitpython = ">=1.0.0,<3.1.29 || >3.1.29"
platformdirs = "*"
-protobuf = {version = ">=3.19.0,<4.21.0 || >4.21.0,<6", markers = "python_version > \"3.9\" or sys_platform != \"linux\""}
+protobuf = {version = ">=3.19.0,<4.21.0 || >4.21.0,<5.28.0 || >5.28.0,<6", markers = "python_version > \"3.9\" or sys_platform != \"linux\""}
psutil = ">=5.0.0"
pyyaml = "*"
requests = ">=2.0.0,<3"
@@ -4278,10 +7173,10 @@ setuptools = "*"
aws = ["boto3"]
azure = ["azure-identity", "azure-storage-blob"]
gcp = ["google-cloud-storage"]
-importers = ["filelock", "mlflow", "polars", "rich", "tenacity"]
+importers = ["filelock", "mlflow", "polars (<=1.2.1)", "rich", "tenacity"]
kubeflow = ["google-cloud-storage", "kubernetes", "minio", "sh"]
-launch = ["awscli", "azure-containerregistry", "azure-identity", "azure-storage-blob", "boto3", "botocore", "chardet", "google-auth", "google-cloud-aiplatform", "google-cloud-artifact-registry", "google-cloud-compute", "google-cloud-storage", "iso8601", "kubernetes", "kubernetes-asyncio", "nbconvert", "nbformat", "optuna", "pydantic", "pyyaml (>=6.0.0)", "tomli", "typing-extensions"]
-media = ["bokeh", "moviepy", "numpy", "pillow", "plotly (>=5.18.0)", "rdkit-pypi", "soundfile"]
+launch = ["awscli", "azure-containerregistry", "azure-identity", "azure-storage-blob", "boto3", "botocore", "chardet", "google-auth", "google-cloud-aiplatform", "google-cloud-artifact-registry", "google-cloud-compute", "google-cloud-storage", "iso8601", "jsonschema", "kubernetes", "kubernetes-asyncio", "nbconvert", "nbformat", "optuna", "pydantic", "pyyaml (>=6.0.0)", "tomli", "typing-extensions"]
+media = ["bokeh", "imageio", "moviepy", "numpy", "pillow", "plotly (>=5.18.0)", "rdkit", "soundfile"]
models = ["cloudpickle"]
perf = ["orjson"]
sweeps = ["sweeps (>=0.2.0)"]
@@ -4298,15 +7193,57 @@ files = [
{file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"},
]
+[[package]]
+name = "webcolors"
+version = "24.8.0"
+description = "A library for working with the color formats defined by HTML and CSS."
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "webcolors-24.8.0-py3-none-any.whl", hash = "sha256:fc4c3b59358ada164552084a8ebee637c221e4059267d0f8325b3b560f6c7f0a"},
+ {file = "webcolors-24.8.0.tar.gz", hash = "sha256:08b07af286a01bcd30d583a7acadf629583d1f79bfef27dd2c2c5c263817277d"},
+]
+
+[package.extras]
+docs = ["furo", "sphinx", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-notfound-page", "sphinxext-opengraph"]
+tests = ["coverage[toml]"]
+
+[[package]]
+name = "webencodings"
+version = "0.5.1"
+description = "Character encoding aliases for legacy web content"
+optional = true
+python-versions = "*"
+files = [
+ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"},
+ {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"},
+]
+
+[[package]]
+name = "websocket-client"
+version = "1.8.0"
+description = "WebSocket client for Python with low level API options"
+optional = true
+python-versions = ">=3.8"
+files = [
+ {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"},
+ {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"},
+]
+
+[package.extras]
+docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"]
+optional = ["python-socks", "wsaccel"]
+test = ["websockets"]
+
[[package]]
name = "werkzeug"
-version = "3.0.3"
+version = "3.1.1"
description = "The comprehensive WSGI web application library."
optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
files = [
- {file = "werkzeug-3.0.3-py3-none-any.whl", hash = "sha256:fc9645dc43e03e4d630d23143a04a7f947a9a3b5727cd535fdfe155a17cc48c8"},
- {file = "werkzeug-3.0.3.tar.gz", hash = "sha256:097e5bfda9f0aba8da6b8545146def481d06aa7d3266e7448e2cccf67dd8bd18"},
+ {file = "werkzeug-3.1.1-py3-none-any.whl", hash = "sha256:a71124d1ef06008baafa3d266c02f56e1836a5984afd6dd6c9230669d60d9fb5"},
+ {file = "werkzeug-3.1.1.tar.gz", hash = "sha256:8cd39dfbdfc1e051965f156163e2974e52c210f130810e9ad36858f0fd3edad4"},
]
[package.dependencies]
@@ -4315,242 +7252,282 @@ MarkupSafe = ">=2.1.1"
[package.extras]
watchdog = ["watchdog (>=2.3)"]
+[[package]]
+name = "widgetsnbextension"
+version = "4.0.13"
+description = "Jupyter interactive widgets for Jupyter Notebook"
+optional = true
+python-versions = ">=3.7"
+files = [
+ {file = "widgetsnbextension-4.0.13-py3-none-any.whl", hash = "sha256:74b2692e8500525cc38c2b877236ba51d34541e6385eeed5aec15a70f88a6c71"},
+ {file = "widgetsnbextension-4.0.13.tar.gz", hash = "sha256:ffcb67bc9febd10234a362795f643927f4e0c05d9342c727b65d2384f8feacb6"},
+]
+
+[[package]]
+name = "xmltodict"
+version = "0.14.1"
+description = "Makes working with XML feel like you are working with JSON"
+optional = true
+python-versions = ">=3.6"
+files = [
+ {file = "xmltodict-0.14.1-py2.py3-none-any.whl", hash = "sha256:3ef4a7b71c08f19047fcbea572e1d7f4207ab269da1565b5d40e9823d3894e63"},
+ {file = "xmltodict-0.14.1.tar.gz", hash = "sha256:338c8431e4fc554517651972d62f06958718f6262b04316917008e8fd677a6b0"},
+]
+
[[package]]
name = "xxhash"
-version = "3.4.1"
+version = "3.5.0"
description = "Python binding for xxHash"
optional = false
python-versions = ">=3.7"
files = [
- {file = "xxhash-3.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91dbfa55346ad3e18e738742236554531a621042e419b70ad8f3c1d9c7a16e7f"},
- {file = "xxhash-3.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:665a65c2a48a72068fcc4d21721510df5f51f1142541c890491afc80451636d2"},
- {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb11628470a6004dc71a09fe90c2f459ff03d611376c1debeec2d648f44cb693"},
- {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bef2a7dc7b4f4beb45a1edbba9b9194c60a43a89598a87f1a0226d183764189"},
- {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0f7b2d547d72c7eda7aa817acf8791f0146b12b9eba1d4432c531fb0352228"},
- {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00f2fdef6b41c9db3d2fc0e7f94cb3db86693e5c45d6de09625caad9a469635b"},
- {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23cfd9ca09acaf07a43e5a695143d9a21bf00f5b49b15c07d5388cadf1f9ce11"},
- {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6a9ff50a3cf88355ca4731682c168049af1ca222d1d2925ef7119c1a78e95b3b"},
- {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f1d7c69a1e9ca5faa75546fdd267f214f63f52f12692f9b3a2f6467c9e67d5e7"},
- {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:672b273040d5d5a6864a36287f3514efcd1d4b1b6a7480f294c4b1d1ee1b8de0"},
- {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4178f78d70e88f1c4a89ff1ffe9f43147185930bb962ee3979dba15f2b1cc799"},
- {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9804b9eb254d4b8cc83ab5a2002128f7d631dd427aa873c8727dba7f1f0d1c2b"},
- {file = "xxhash-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c09c49473212d9c87261d22c74370457cfff5db2ddfc7fd1e35c80c31a8c14ce"},
- {file = "xxhash-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ebbb1616435b4a194ce3466d7247df23499475c7ed4eb2681a1fa42ff766aff6"},
- {file = "xxhash-3.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:25dc66be3db54f8a2d136f695b00cfe88018e59ccff0f3b8f545869f376a8a46"},
- {file = "xxhash-3.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58c49083801885273e262c0f5bbeac23e520564b8357fbb18fb94ff09d3d3ea5"},
- {file = "xxhash-3.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b526015a973bfbe81e804a586b703f163861da36d186627e27524f5427b0d520"},
- {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ad4457644c91a966f6fe137d7467636bdc51a6ce10a1d04f365c70d6a16d7e"},
- {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:248d3e83d119770f96003271fe41e049dd4ae52da2feb8f832b7a20e791d2920"},
- {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2070b6d5bbef5ee031666cf21d4953c16e92c2f8a24a94b5c240f8995ba3b1d0"},
- {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2746035f518f0410915e247877f7df43ef3372bf36cfa52cc4bc33e85242641"},
- {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ba6181514681c2591840d5632fcf7356ab287d4aff1c8dea20f3c78097088"},
- {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aac5010869240e95f740de43cd6a05eae180c59edd182ad93bf12ee289484fa"},
- {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4cb11d8debab1626181633d184b2372aaa09825bde709bf927704ed72765bed1"},
- {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b29728cff2c12f3d9f1d940528ee83918d803c0567866e062683f300d1d2eff3"},
- {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a15cbf3a9c40672523bdb6ea97ff74b443406ba0ab9bca10ceccd9546414bd84"},
- {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e66df260fed01ed8ea790c2913271641c58481e807790d9fca8bfd5a3c13844"},
- {file = "xxhash-3.4.1-cp311-cp311-win32.whl", hash = "sha256:e867f68a8f381ea12858e6d67378c05359d3a53a888913b5f7d35fbf68939d5f"},
- {file = "xxhash-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:200a5a3ad9c7c0c02ed1484a1d838b63edcf92ff538770ea07456a3732c577f4"},
- {file = "xxhash-3.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:1d03f1c0d16d24ea032e99f61c552cb2b77d502e545187338bea461fde253583"},
- {file = "xxhash-3.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c4bbba9b182697a52bc0c9f8ec0ba1acb914b4937cd4a877ad78a3b3eeabefb3"},
- {file = "xxhash-3.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9fd28a9da300e64e434cfc96567a8387d9a96e824a9be1452a1e7248b7763b78"},
- {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6066d88c9329ab230e18998daec53d819daeee99d003955c8db6fc4971b45ca3"},
- {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93805bc3233ad89abf51772f2ed3355097a5dc74e6080de19706fc447da99cd3"},
- {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64da57d5ed586ebb2ecdde1e997fa37c27fe32fe61a656b77fabbc58e6fbff6e"},
- {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97322e9a7440bf3c9805cbaac090358b43f650516486746f7fa482672593df"},
- {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe750d512982ee7d831838a5dee9e9848f3fb440e4734cca3f298228cc957a6"},
- {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fd79d4087727daf4d5b8afe594b37d611ab95dc8e29fe1a7517320794837eb7d"},
- {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:743612da4071ff9aa4d055f3f111ae5247342931dedb955268954ef7201a71ff"},
- {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b41edaf05734092f24f48c0958b3c6cbaaa5b7e024880692078c6b1f8247e2fc"},
- {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a90356ead70d715fe64c30cd0969072de1860e56b78adf7c69d954b43e29d9fa"},
- {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ac56eebb364e44c85e1d9e9cc5f6031d78a34f0092fea7fc80478139369a8b4a"},
- {file = "xxhash-3.4.1-cp312-cp312-win32.whl", hash = "sha256:911035345932a153c427107397c1518f8ce456f93c618dd1c5b54ebb22e73747"},
- {file = "xxhash-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:f31ce76489f8601cc7b8713201ce94b4bd7b7ce90ba3353dccce7e9e1fee71fa"},
- {file = "xxhash-3.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:b5beb1c6a72fdc7584102f42c4d9df232ee018ddf806e8c90906547dfb43b2da"},
- {file = "xxhash-3.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6d42b24d1496deb05dee5a24ed510b16de1d6c866c626c2beb11aebf3be278b9"},
- {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b685fab18876b14a8f94813fa2ca80cfb5ab6a85d31d5539b7cd749ce9e3624"},
- {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419ffe34c17ae2df019a4685e8d3934d46b2e0bbe46221ab40b7e04ed9f11137"},
- {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e041ce5714f95251a88670c114b748bca3bf80cc72400e9f23e6d0d59cf2681"},
- {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc860d887c5cb2f524899fb8338e1bb3d5789f75fac179101920d9afddef284b"},
- {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:312eba88ffe0a05e332e3a6f9788b73883752be63f8588a6dc1261a3eaaaf2b2"},
- {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e01226b6b6a1ffe4e6bd6d08cfcb3ca708b16f02eb06dd44f3c6e53285f03e4f"},
- {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9f3025a0d5d8cf406a9313cd0d5789c77433ba2004b1c75439b67678e5136537"},
- {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:6d3472fd4afef2a567d5f14411d94060099901cd8ce9788b22b8c6f13c606a93"},
- {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:43984c0a92f06cac434ad181f329a1445017c33807b7ae4f033878d860a4b0f2"},
- {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a55e0506fdb09640a82ec4f44171273eeabf6f371a4ec605633adb2837b5d9d5"},
- {file = "xxhash-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:faec30437919555b039a8bdbaba49c013043e8f76c999670aef146d33e05b3a0"},
- {file = "xxhash-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c9e1b646af61f1fc7083bb7b40536be944f1ac67ef5e360bca2d73430186971a"},
- {file = "xxhash-3.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:961d948b7b1c1b6c08484bbce3d489cdf153e4122c3dfb07c2039621243d8795"},
- {file = "xxhash-3.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:719a378930504ab159f7b8e20fa2aa1896cde050011af838af7e7e3518dd82de"},
- {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74fb5cb9406ccd7c4dd917f16630d2e5e8cbbb02fc2fca4e559b2a47a64f4940"},
- {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dab508ac39e0ab988039bc7f962c6ad021acd81fd29145962b068df4148c476"},
- {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c59f3e46e7daf4c589e8e853d700ef6607afa037bfad32c390175da28127e8c"},
- {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cc07256eff0795e0f642df74ad096f8c5d23fe66bc138b83970b50fc7f7f6c5"},
- {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9f749999ed80f3955a4af0eb18bb43993f04939350b07b8dd2f44edc98ffee9"},
- {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7688d7c02149a90a3d46d55b341ab7ad1b4a3f767be2357e211b4e893efbaaf6"},
- {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a8b4977963926f60b0d4f830941c864bed16aa151206c01ad5c531636da5708e"},
- {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8106d88da330f6535a58a8195aa463ef5281a9aa23b04af1848ff715c4398fb4"},
- {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4c76a77dbd169450b61c06fd2d5d436189fc8ab7c1571d39265d4822da16df22"},
- {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:11f11357c86d83e53719c592021fd524efa9cf024dc7cb1dfb57bbbd0d8713f2"},
- {file = "xxhash-3.4.1-cp38-cp38-win32.whl", hash = "sha256:0c786a6cd74e8765c6809892a0d45886e7c3dc54de4985b4a5eb8b630f3b8e3b"},
- {file = "xxhash-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:aabf37fb8fa27430d50507deeab2ee7b1bcce89910dd10657c38e71fee835594"},
- {file = "xxhash-3.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6127813abc1477f3a83529b6bbcfeddc23162cece76fa69aee8f6a8a97720562"},
- {file = "xxhash-3.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef2e194262f5db16075caea7b3f7f49392242c688412f386d3c7b07c7733a70a"},
- {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71be94265b6c6590f0018bbf73759d21a41c6bda20409782d8117e76cd0dfa8b"},
- {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10e0a619cdd1c0980e25eb04e30fe96cf8f4324758fa497080af9c21a6de573f"},
- {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa122124d2e3bd36581dd78c0efa5f429f5220313479fb1072858188bc2d5ff1"},
- {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17032f5a4fea0a074717fe33477cb5ee723a5f428de7563e75af64bfc1b1e10"},
- {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca7783b20e3e4f3f52f093538895863f21d18598f9a48211ad757680c3bd006f"},
- {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d77d09a1113899fad5f354a1eb4f0a9afcf58cefff51082c8ad643ff890e30cf"},
- {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:21287bcdd299fdc3328cc0fbbdeaa46838a1c05391264e51ddb38a3f5b09611f"},
- {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:dfd7a6cc483e20b4ad90224aeb589e64ec0f31e5610ab9957ff4314270b2bf31"},
- {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:543c7fcbc02bbb4840ea9915134e14dc3dc15cbd5a30873a7a5bf66039db97ec"},
- {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fe0a98d990e433013f41827b62be9ab43e3cf18e08b1483fcc343bda0d691182"},
- {file = "xxhash-3.4.1-cp39-cp39-win32.whl", hash = "sha256:b9097af00ebf429cc7c0e7d2fdf28384e4e2e91008130ccda8d5ae653db71e54"},
- {file = "xxhash-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:d699b921af0dcde50ab18be76c0d832f803034d80470703700cb7df0fbec2832"},
- {file = "xxhash-3.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:2be491723405e15cc099ade1280133ccfbf6322d2ef568494fb7d07d280e7eee"},
- {file = "xxhash-3.4.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:431625fad7ab5649368c4849d2b49a83dc711b1f20e1f7f04955aab86cd307bc"},
- {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc6dbd5fc3c9886a9e041848508b7fb65fd82f94cc793253990f81617b61fe49"},
- {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ff8dbd0ec97aec842476cb8ccc3e17dd288cd6ce3c8ef38bff83d6eb927817"},
- {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef73a53fe90558a4096e3256752268a8bdc0322f4692ed928b6cd7ce06ad4fe3"},
- {file = "xxhash-3.4.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:450401f42bbd274b519d3d8dcf3c57166913381a3d2664d6609004685039f9d3"},
- {file = "xxhash-3.4.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a162840cf4de8a7cd8720ff3b4417fbc10001eefdd2d21541a8226bb5556e3bb"},
- {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b736a2a2728ba45017cb67785e03125a79d246462dfa892d023b827007412c52"},
- {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0ae4c2e7698adef58710d6e7a32ff518b66b98854b1c68e70eee504ad061d8"},
- {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6322c4291c3ff174dcd104fae41500e75dad12be6f3085d119c2c8a80956c51"},
- {file = "xxhash-3.4.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dd59ed668801c3fae282f8f4edadf6dc7784db6d18139b584b6d9677ddde1b6b"},
- {file = "xxhash-3.4.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92693c487e39523a80474b0394645b393f0ae781d8db3474ccdcead0559ccf45"},
- {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4603a0f642a1e8d7f3ba5c4c25509aca6a9c1cc16f85091004a7028607ead663"},
- {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa45e8cbfbadb40a920fe9ca40c34b393e0b067082d94006f7f64e70c7490a6"},
- {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:595b252943b3552de491ff51e5bb79660f84f033977f88f6ca1605846637b7c6"},
- {file = "xxhash-3.4.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:562d8b8f783c6af969806aaacf95b6c7b776929ae26c0cd941d54644ea7ef51e"},
- {file = "xxhash-3.4.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:41ddeae47cf2828335d8d991f2d2b03b0bdc89289dc64349d712ff8ce59d0647"},
- {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c44d584afdf3c4dbb3277e32321d1a7b01d6071c1992524b6543025fb8f4206f"},
- {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7bddb3a5b86213cc3f2c61500c16945a1b80ecd572f3078ddbbe68f9dabdfb"},
- {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ecb6c987b62437c2f99c01e97caf8d25660bf541fe79a481d05732e5236719c"},
- {file = "xxhash-3.4.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:696b4e18b7023527d5c50ed0626ac0520edac45a50ec7cf3fc265cd08b1f4c03"},
- {file = "xxhash-3.4.1.tar.gz", hash = "sha256:0379d6cf1ff987cd421609a264ce025e74f346e3e145dd106c0cc2e3ec3f99a9"},
+ {file = "xxhash-3.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ece616532c499ee9afbb83078b1b952beffef121d989841f7f4b3dc5ac0fd212"},
+ {file = "xxhash-3.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3171f693dbc2cef6477054a665dc255d996646b4023fe56cb4db80e26f4cc520"},
+ {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c5d3e570ef46adaf93fc81b44aca6002b5a4d8ca11bd0580c07eac537f36680"},
+ {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7cb29a034301e2982df8b1fe6328a84f4b676106a13e9135a0d7e0c3e9f806da"},
+ {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d0d307d27099bb0cbeea7260eb39ed4fdb99c5542e21e94bb6fd29e49c57a23"},
+ {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0342aafd421795d740e514bc9858ebddfc705a75a8c5046ac56d85fe97bf196"},
+ {file = "xxhash-3.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3dbbd9892c5ebffeca1ed620cf0ade13eb55a0d8c84e0751a6653adc6ac40d0c"},
+ {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4cc2d67fdb4d057730c75a64c5923abfa17775ae234a71b0200346bfb0a7f482"},
+ {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ec28adb204b759306a3d64358a5e5c07d7b1dd0ccbce04aa76cb9377b7b70296"},
+ {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1328f6d8cca2b86acb14104e381225a3d7b42c92c4b86ceae814e5c400dbb415"},
+ {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8d47ebd9f5d9607fd039c1fbf4994e3b071ea23eff42f4ecef246ab2b7334198"},
+ {file = "xxhash-3.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b96d559e0fcddd3343c510a0fe2b127fbff16bf346dd76280b82292567523442"},
+ {file = "xxhash-3.5.0-cp310-cp310-win32.whl", hash = "sha256:61c722ed8d49ac9bc26c7071eeaa1f6ff24053d553146d5df031802deffd03da"},
+ {file = "xxhash-3.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:9bed5144c6923cc902cd14bb8963f2d5e034def4486ab0bbe1f58f03f042f9a9"},
+ {file = "xxhash-3.5.0-cp310-cp310-win_arm64.whl", hash = "sha256:893074d651cf25c1cc14e3bea4fceefd67f2921b1bb8e40fcfeba56820de80c6"},
+ {file = "xxhash-3.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02c2e816896dc6f85922ced60097bcf6f008dedfc5073dcba32f9c8dd786f3c1"},
+ {file = "xxhash-3.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6027dcd885e21581e46d3c7f682cfb2b870942feeed58a21c29583512c3f09f8"},
+ {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1308fa542bbdbf2fa85e9e66b1077eea3a88bef38ee8a06270b4298a7a62a166"},
+ {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c28b2fdcee797e1c1961cd3bcd3d545cab22ad202c846235197935e1df2f8ef7"},
+ {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:924361811732ddad75ff23e90efd9ccfda4f664132feecb90895bade6a1b4623"},
+ {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89997aa1c4b6a5b1e5b588979d1da048a3c6f15e55c11d117a56b75c84531f5a"},
+ {file = "xxhash-3.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:685c4f4e8c59837de103344eb1c8a3851f670309eb5c361f746805c5471b8c88"},
+ {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:dbd2ecfbfee70bc1a4acb7461fa6af7748ec2ab08ac0fa298f281c51518f982c"},
+ {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:25b5a51dc3dfb20a10833c8eee25903fd2e14059e9afcd329c9da20609a307b2"},
+ {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a8fb786fb754ef6ff8c120cb96629fb518f8eb5a61a16aac3a979a9dbd40a084"},
+ {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a905ad00ad1e1c34fe4e9d7c1d949ab09c6fa90c919860c1534ff479f40fd12d"},
+ {file = "xxhash-3.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:963be41bcd49f53af6d795f65c0da9b4cc518c0dd9c47145c98f61cb464f4839"},
+ {file = "xxhash-3.5.0-cp311-cp311-win32.whl", hash = "sha256:109b436096d0a2dd039c355fa3414160ec4d843dfecc64a14077332a00aeb7da"},
+ {file = "xxhash-3.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:b702f806693201ad6c0a05ddbbe4c8f359626d0b3305f766077d51388a6bac58"},
+ {file = "xxhash-3.5.0-cp311-cp311-win_arm64.whl", hash = "sha256:c4dcb4120d0cc3cc448624147dba64e9021b278c63e34a38789b688fd0da9bf3"},
+ {file = "xxhash-3.5.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:14470ace8bd3b5d51318782cd94e6f94431974f16cb3b8dc15d52f3b69df8e00"},
+ {file = "xxhash-3.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59aa1203de1cb96dbeab595ded0ad0c0056bb2245ae11fac11c0ceea861382b9"},
+ {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08424f6648526076e28fae6ea2806c0a7d504b9ef05ae61d196d571e5c879c84"},
+ {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61a1ff00674879725b194695e17f23d3248998b843eb5e933007ca743310f793"},
+ {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2f2c61bee5844d41c3eb015ac652a0229e901074951ae48581d58bfb2ba01be"},
+ {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d32a592cac88d18cc09a89172e1c32d7f2a6e516c3dfde1b9adb90ab5df54a6"},
+ {file = "xxhash-3.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70dabf941dede727cca579e8c205e61121afc9b28516752fd65724be1355cc90"},
+ {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e5d0ddaca65ecca9c10dcf01730165fd858533d0be84c75c327487c37a906a27"},
+ {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e5b5e16c5a480fe5f59f56c30abdeba09ffd75da8d13f6b9b6fd224d0b4d0a2"},
+ {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:149b7914451eb154b3dfaa721315117ea1dac2cc55a01bfbd4df7c68c5dd683d"},
+ {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:eade977f5c96c677035ff39c56ac74d851b1cca7d607ab3d8f23c6b859379cab"},
+ {file = "xxhash-3.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fa9f547bd98f5553d03160967866a71056a60960be00356a15ecc44efb40ba8e"},
+ {file = "xxhash-3.5.0-cp312-cp312-win32.whl", hash = "sha256:f7b58d1fd3551b8c80a971199543379be1cee3d0d409e1f6d8b01c1a2eebf1f8"},
+ {file = "xxhash-3.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:fa0cafd3a2af231b4e113fba24a65d7922af91aeb23774a8b78228e6cd785e3e"},
+ {file = "xxhash-3.5.0-cp312-cp312-win_arm64.whl", hash = "sha256:586886c7e89cb9828bcd8a5686b12e161368e0064d040e225e72607b43858ba2"},
+ {file = "xxhash-3.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:37889a0d13b0b7d739cfc128b1c902f04e32de17b33d74b637ad42f1c55101f6"},
+ {file = "xxhash-3.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:97a662338797c660178e682f3bc180277b9569a59abfb5925e8620fba00b9fc5"},
+ {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f85e0108d51092bdda90672476c7d909c04ada6923c14ff9d913c4f7dc8a3bc"},
+ {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2fd827b0ba763ac919440042302315c564fdb797294d86e8cdd4578e3bc7f3"},
+ {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82085c2abec437abebf457c1d12fccb30cc8b3774a0814872511f0f0562c768c"},
+ {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07fda5de378626e502b42b311b049848c2ef38784d0d67b6f30bb5008642f8eb"},
+ {file = "xxhash-3.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c279f0d2b34ef15f922b77966640ade58b4ccdfef1c4d94b20f2a364617a493f"},
+ {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:89e66ceed67b213dec5a773e2f7a9e8c58f64daeb38c7859d8815d2c89f39ad7"},
+ {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bcd51708a633410737111e998ceb3b45d3dbc98c0931f743d9bb0a209033a326"},
+ {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3ff2c0a34eae7df88c868be53a8dd56fbdf592109e21d4bfa092a27b0bf4a7bf"},
+ {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:4e28503dccc7d32e0b9817aa0cbfc1f45f563b2c995b7a66c4c8a0d232e840c7"},
+ {file = "xxhash-3.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a6c50017518329ed65a9e4829154626f008916d36295b6a3ba336e2458824c8c"},
+ {file = "xxhash-3.5.0-cp313-cp313-win32.whl", hash = "sha256:53a068fe70301ec30d868ece566ac90d873e3bb059cf83c32e76012c889b8637"},
+ {file = "xxhash-3.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:80babcc30e7a1a484eab952d76a4f4673ff601f54d5142c26826502740e70b43"},
+ {file = "xxhash-3.5.0-cp313-cp313-win_arm64.whl", hash = "sha256:4811336f1ce11cac89dcbd18f3a25c527c16311709a89313c3acaf771def2d4b"},
+ {file = "xxhash-3.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6e5f70f6dca1d3b09bccb7daf4e087075ff776e3da9ac870f86ca316736bb4aa"},
+ {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e76e83efc7b443052dd1e585a76201e40b3411fe3da7af4fe434ec51b2f163b"},
+ {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33eac61d0796ca0591f94548dcfe37bb193671e0c9bcf065789b5792f2eda644"},
+ {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ec70a89be933ea49222fafc3999987d7899fc676f688dd12252509434636622"},
+ {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86b8e7f703ec6ff4f351cfdb9f428955859537125904aa8c963604f2e9d3e7"},
+ {file = "xxhash-3.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0adfbd36003d9f86c8c97110039f7539b379f28656a04097e7434d3eaf9aa131"},
+ {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:63107013578c8a730419adc05608756c3fa640bdc6abe806c3123a49fb829f43"},
+ {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:683b94dbd1ca67557850b86423318a2e323511648f9f3f7b1840408a02b9a48c"},
+ {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5d2a01dcce81789cf4b12d478b5464632204f4c834dc2d064902ee27d2d1f0ee"},
+ {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:a9d360a792cbcce2fe7b66b8d51274ec297c53cbc423401480e53b26161a290d"},
+ {file = "xxhash-3.5.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:f0b48edbebea1b7421a9c687c304f7b44d0677c46498a046079d445454504737"},
+ {file = "xxhash-3.5.0-cp37-cp37m-win32.whl", hash = "sha256:7ccb800c9418e438b44b060a32adeb8393764da7441eb52aa2aa195448935306"},
+ {file = "xxhash-3.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c3bc7bf8cb8806f8d1c9bf149c18708cb1c406520097d6b0a73977460ea03602"},
+ {file = "xxhash-3.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:74752ecaa544657d88b1d1c94ae68031e364a4d47005a90288f3bab3da3c970f"},
+ {file = "xxhash-3.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dee1316133c9b463aa81aca676bc506d3f80d8f65aeb0bba2b78d0b30c51d7bd"},
+ {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:602d339548d35a8579c6b013339fb34aee2df9b4e105f985443d2860e4d7ffaa"},
+ {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:695735deeddfb35da1677dbc16a083445360e37ff46d8ac5c6fcd64917ff9ade"},
+ {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1030a39ba01b0c519b1a82f80e8802630d16ab95dc3f2b2386a0b5c8ed5cbb10"},
+ {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5bc08f33c4966f4eb6590d6ff3ceae76151ad744576b5fc6c4ba8edd459fdec"},
+ {file = "xxhash-3.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160e0c19ee500482ddfb5d5570a0415f565d8ae2b3fd69c5dcfce8a58107b1c3"},
+ {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f1abffa122452481a61c3551ab3c89d72238e279e517705b8b03847b1d93d738"},
+ {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:d5e9db7ef3ecbfc0b4733579cea45713a76852b002cf605420b12ef3ef1ec148"},
+ {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:23241ff6423378a731d84864bf923a41649dc67b144debd1077f02e6249a0d54"},
+ {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:82b833d5563fefd6fceafb1aed2f3f3ebe19f84760fdd289f8b926731c2e6e91"},
+ {file = "xxhash-3.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a80ad0ffd78bef9509eee27b4a29e56f5414b87fb01a888353e3d5bda7038bd"},
+ {file = "xxhash-3.5.0-cp38-cp38-win32.whl", hash = "sha256:50ac2184ffb1b999e11e27c7e3e70cc1139047e7ebc1aa95ed12f4269abe98d4"},
+ {file = "xxhash-3.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:392f52ebbb932db566973693de48f15ce787cabd15cf6334e855ed22ea0be5b3"},
+ {file = "xxhash-3.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfc8cdd7f33d57f0468b0614ae634cc38ab9202c6957a60e31d285a71ebe0301"},
+ {file = "xxhash-3.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e0c48b6300cd0b0106bf49169c3e0536408dfbeb1ccb53180068a18b03c662ab"},
+ {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe1a92cfbaa0a1253e339ccec42dbe6db262615e52df591b68726ab10338003f"},
+ {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33513d6cc3ed3b559134fb307aae9bdd94d7e7c02907b37896a6c45ff9ce51bd"},
+ {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eefc37f6138f522e771ac6db71a6d4838ec7933939676f3753eafd7d3f4c40bc"},
+ {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a606c8070ada8aa2a88e181773fa1ef17ba65ce5dd168b9d08038e2a61b33754"},
+ {file = "xxhash-3.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42eca420c8fa072cc1dd62597635d140e78e384a79bb4944f825fbef8bfeeef6"},
+ {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:604253b2143e13218ff1ef0b59ce67f18b8bd1c4205d2ffda22b09b426386898"},
+ {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6e93a5ad22f434d7876665444a97e713a8f60b5b1a3521e8df11b98309bff833"},
+ {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:7a46e1d6d2817ba8024de44c4fd79913a90e5f7265434cef97026215b7d30df6"},
+ {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:30eb2efe6503c379b7ab99c81ba4a779748e3830241f032ab46bd182bf5873af"},
+ {file = "xxhash-3.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c8aa771ff2c13dd9cda8166d685d7333d389fae30a4d2bb39d63ab5775de8606"},
+ {file = "xxhash-3.5.0-cp39-cp39-win32.whl", hash = "sha256:5ed9ebc46f24cf91034544b26b131241b699edbfc99ec5e7f8f3d02d6eb7fba4"},
+ {file = "xxhash-3.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:220f3f896c6b8d0316f63f16c077d52c412619e475f9372333474ee15133a558"},
+ {file = "xxhash-3.5.0-cp39-cp39-win_arm64.whl", hash = "sha256:a7b1d8315d9b5e9f89eb2933b73afae6ec9597a258d52190944437158b49d38e"},
+ {file = "xxhash-3.5.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2014c5b3ff15e64feecb6b713af12093f75b7926049e26a580e94dcad3c73d8c"},
+ {file = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fab81ef75003eda96239a23eda4e4543cedc22e34c373edcaf744e721a163986"},
+ {file = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e2febf914ace002132aa09169cc572e0d8959d0f305f93d5828c4836f9bc5a6"},
+ {file = "xxhash-3.5.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5d3a10609c51da2a1c0ea0293fc3968ca0a18bd73838455b5bca3069d7f8e32b"},
+ {file = "xxhash-3.5.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5a74f23335b9689b66eb6dbe2a931a88fcd7a4c2cc4b1cb0edba8ce381c7a1da"},
+ {file = "xxhash-3.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2b4154c00eb22e4d543f472cfca430e7962a0f1d0f3778334f2e08a7ba59363c"},
+ {file = "xxhash-3.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d30bbc1644f726b825b3278764240f449d75f1a8bdda892e641d4a688b1494ae"},
+ {file = "xxhash-3.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa0b72f2423e2aa53077e54a61c28e181d23effeaafd73fcb9c494e60930c8e"},
+ {file = "xxhash-3.5.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13de2b76c1835399b2e419a296d5b38dc4855385d9e96916299170085ef72f57"},
+ {file = "xxhash-3.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0691bfcc4f9c656bcb96cc5db94b4d75980b9d5589f2e59de790091028580837"},
+ {file = "xxhash-3.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:297595fe6138d4da2c8ce9e72a04d73e58725bb60f3a19048bc96ab2ff31c692"},
+ {file = "xxhash-3.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc1276d369452040cbb943300dc8abeedab14245ea44056a2943183822513a18"},
+ {file = "xxhash-3.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2061188a1ba352fc699c82bff722f4baacb4b4b8b2f0c745d2001e56d0dfb514"},
+ {file = "xxhash-3.5.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c384c434021e4f62b8d9ba0bc9467e14d394893077e2c66d826243025e1f81"},
+ {file = "xxhash-3.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e6a4dd644d72ab316b580a1c120b375890e4c52ec392d4aef3c63361ec4d77d1"},
+ {file = "xxhash-3.5.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:531af8845aaadcadf951b7e0c1345c6b9c68a990eeb74ff9acd8501a0ad6a1c9"},
+ {file = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ce379bcaa9fcc00f19affa7773084dd09f5b59947b3fb47a1ceb0179f91aaa1"},
+ {file = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd1b2281d01723f076df3c8188f43f2472248a6b63118b036e641243656b1b0f"},
+ {file = "xxhash-3.5.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c770750cc80e8694492244bca7251385188bc5597b6a39d98a9f30e8da984e0"},
+ {file = "xxhash-3.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b150b8467852e1bd844387459aa6fbe11d7f38b56e901f9f3b3e6aba0d660240"},
+ {file = "xxhash-3.5.0.tar.gz", hash = "sha256:84f2caddf951c9cbf8dc2e22a89d4ccf5d86391ac6418fe81e3c67d0cf60b45f"},
]
[[package]]
name = "yarl"
-version = "1.9.4"
+version = "1.14.0"
description = "Yet another URL library"
optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
files = [
- {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"},
- {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"},
- {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"},
- {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"},
- {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"},
- {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"},
- {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"},
- {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"},
- {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"},
- {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"},
- {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"},
- {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"},
- {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"},
- {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = "sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"},
- {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"},
- {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"},
- {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"},
- {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"},
- {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"},
- {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"},
- {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"},
- {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"},
- {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"},
- {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"},
- {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"},
- {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"},
- {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"},
- {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"},
- {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"},
- {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"},
- {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"},
- {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"},
- {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"},
- {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"},
- {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"},
- {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"},
- {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"},
- {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"},
- {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"},
- {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"},
- {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"},
- {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"},
- {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"},
- {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"},
- {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"},
- {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"},
- {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"},
- {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"},
- {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"},
- {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"},
- {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"},
- {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"},
- {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"},
- {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"},
- {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"},
- {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"},
- {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"},
- {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"},
- {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"},
- {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"},
- {file = "yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"},
- {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"},
- {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"},
- {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"},
- {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"},
- {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"},
- {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"},
- {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"},
- {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"},
- {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"},
- {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"},
- {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"},
- {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"},
- {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"},
- {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"},
- {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"},
- {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"},
- {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"},
- {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"},
- {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"},
- {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"},
- {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"},
- {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"},
- {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"},
- {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"},
- {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"},
- {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"},
- {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"},
- {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"},
- {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"},
+ {file = "yarl-1.14.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1bfc25aa6a7c99cf86564210f79a0b7d4484159c67e01232b116e445b3036547"},
+ {file = "yarl-1.14.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0cf21f46a15d445417de8fc89f2568852cf57fe8ca1ab3d19ddb24d45c0383ae"},
+ {file = "yarl-1.14.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1dda53508df0de87b6e6b0a52d6718ff6c62a5aca8f5552748404963df639269"},
+ {file = "yarl-1.14.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:587c3cc59bc148a9b1c07a019346eda2549bc9f468acd2f9824d185749acf0a6"},
+ {file = "yarl-1.14.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3007a5b75cb50140708420fe688c393e71139324df599434633019314ceb8b59"},
+ {file = "yarl-1.14.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:06ff23462398333c78b6f4f8d3d70410d657a471c2c5bbe6086133be43fc8f1a"},
+ {file = "yarl-1.14.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:689a99a42ee4583fcb0d3a67a0204664aa1539684aed72bdafcbd505197a91c4"},
+ {file = "yarl-1.14.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0547ab1e9345dc468cac8368d88ea4c5bd473ebc1d8d755347d7401982b5dd8"},
+ {file = "yarl-1.14.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:742aef0a99844faaac200564ea6f5e08facb285d37ea18bd1a5acf2771f3255a"},
+ {file = "yarl-1.14.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:176110bff341b6730f64a1eb3a7070e12b373cf1c910a9337e7c3240497db76f"},
+ {file = "yarl-1.14.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:46a9772a1efa93f9cd170ad33101c1817c77e0e9914d4fe33e2da299d7cf0f9b"},
+ {file = "yarl-1.14.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:ee2c68e4f2dd1b1c15b849ba1c96fac105fca6ffdb7c1e8be51da6fabbdeafb9"},
+ {file = "yarl-1.14.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:047b258e00b99091b6f90355521f026238c63bd76dcf996d93527bb13320eefd"},
+ {file = "yarl-1.14.0-cp310-cp310-win32.whl", hash = "sha256:0aa92e3e30a04f9462a25077db689c4ac5ea9ab6cc68a2e563881b987d42f16d"},
+ {file = "yarl-1.14.0-cp310-cp310-win_amd64.whl", hash = "sha256:d9baec588f015d0ee564057aa7574313c53a530662ffad930b7886becc85abdf"},
+ {file = "yarl-1.14.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:07f9eaf57719d6721ab15805d85f4b01a5b509a0868d7320134371bcb652152d"},
+ {file = "yarl-1.14.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c14b504a74e58e2deb0378b3eca10f3d076635c100f45b113c18c770b4a47a50"},
+ {file = "yarl-1.14.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:16a682a127930f3fc4e42583becca6049e1d7214bcad23520c590edd741d2114"},
+ {file = "yarl-1.14.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73bedd2be05f48af19f0f2e9e1353921ce0c83f4a1c9e8556ecdcf1f1eae4892"},
+ {file = "yarl-1.14.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3ab950f8814f3b7b5e3eebc117986f817ec933676f68f0a6c5b2137dd7c9c69"},
+ {file = "yarl-1.14.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b693c63e7e64b524f54aa4888403c680342d1ad0d97be1707c531584d6aeeb4f"},
+ {file = "yarl-1.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85cb3e40eaa98489f1e2e8b29f5ad02ee1ee40d6ce6b88d50cf0f205de1d9d2c"},
+ {file = "yarl-1.14.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f24f08b6c9b9818fd80612c97857d28f9779f0d1211653ece9844fc7b414df2"},
+ {file = "yarl-1.14.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:29a84a46ec3ebae7a1c024c055612b11e9363a8a23238b3e905552d77a2bc51b"},
+ {file = "yarl-1.14.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5cd5dad8366e0168e0fd23d10705a603790484a6dbb9eb272b33673b8f2cce72"},
+ {file = "yarl-1.14.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a152751af7ef7b5d5fa6d215756e508dd05eb07d0cf2ba51f3e740076aa74373"},
+ {file = "yarl-1.14.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:3d569f877ed9a708e4c71a2d13d2940cb0791da309f70bd970ac1a5c088a0a92"},
+ {file = "yarl-1.14.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6a615cad11ec3428020fb3c5a88d85ce1b5c69fd66e9fcb91a7daa5e855325dd"},
+ {file = "yarl-1.14.0-cp311-cp311-win32.whl", hash = "sha256:bab03192091681d54e8225c53f270b0517637915d9297028409a2a5114ff4634"},
+ {file = "yarl-1.14.0-cp311-cp311-win_amd64.whl", hash = "sha256:985623575e5c4ea763056ffe0e2d63836f771a8c294b3de06d09480538316b13"},
+ {file = "yarl-1.14.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fc2c80bc87fba076e6cbb926216c27fba274dae7100a7b9a0983b53132dd99f2"},
+ {file = "yarl-1.14.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:55c144d363ad4626ca744556c049c94e2b95096041ac87098bb363dcc8635e8d"},
+ {file = "yarl-1.14.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b03384eed107dbeb5f625a99dc3a7de8be04fc8480c9ad42fccbc73434170b20"},
+ {file = "yarl-1.14.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f72a0d746d38cb299b79ce3d4d60ba0892c84bbc905d0d49c13df5bace1b65f8"},
+ {file = "yarl-1.14.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8648180b34faaea4aa5b5ca7e871d9eb1277033fa439693855cf0ea9195f85f1"},
+ {file = "yarl-1.14.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9557c9322aaa33174d285b0c1961fb32499d65ad1866155b7845edc876c3c835"},
+ {file = "yarl-1.14.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f50eb3837012a937a2b649ec872b66ba9541ad9d6f103ddcafb8231cfcafd22"},
+ {file = "yarl-1.14.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8892fa575ac9b1b25fae7b221bc4792a273877b9b56a99ee2d8d03eeb3dbb1d2"},
+ {file = "yarl-1.14.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e6a2c5c5bb2556dfbfffffc2bcfb9c235fd2b566d5006dfb2a37afc7e3278a07"},
+ {file = "yarl-1.14.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ab3abc0b78a5dfaa4795a6afbe7b282b6aa88d81cf8c1bb5e394993d7cae3457"},
+ {file = "yarl-1.14.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:47eede5d11d669ab3759b63afb70d28d5328c14744b8edba3323e27dc52d298d"},
+ {file = "yarl-1.14.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fe4d2536c827f508348d7b40c08767e8c7071614250927233bf0c92170451c0a"},
+ {file = "yarl-1.14.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0fd7b941dd1b00b5f0acb97455fea2c4b7aac2dd31ea43fb9d155e9bc7b78664"},
+ {file = "yarl-1.14.0-cp312-cp312-win32.whl", hash = "sha256:99ff3744f5fe48288be6bc402533b38e89749623a43208e1d57091fc96b783b9"},
+ {file = "yarl-1.14.0-cp312-cp312-win_amd64.whl", hash = "sha256:1ca3894e9e9f72da93544f64988d9c052254a338a9f855165f37f51edb6591de"},
+ {file = "yarl-1.14.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:5d02d700705d67e09e1f57681f758f0b9d4412eeb70b2eb8d96ca6200b486db3"},
+ {file = "yarl-1.14.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:30600ba5db60f7c0820ef38a2568bb7379e1418ecc947a0f76fd8b2ff4257a97"},
+ {file = "yarl-1.14.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e85d86527baebb41a214cc3b45c17177177d900a2ad5783dbe6f291642d4906f"},
+ {file = "yarl-1.14.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37001e5d4621cef710c8dc1429ca04e189e572f128ab12312eab4e04cf007132"},
+ {file = "yarl-1.14.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f4f4547944d4f5cfcdc03f3f097d6f05bbbc915eaaf80a2ee120d0e756de377d"},
+ {file = "yarl-1.14.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75ff4c819757f9bdb35de049a509814d6ce851fe26f06eb95a392a5640052482"},
+ {file = "yarl-1.14.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68ac1a09392ed6e3fd14be880d39b951d7b981fd135416db7d18a6208c536561"},
+ {file = "yarl-1.14.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96952f642ac69075e44c7d0284528938fdff39422a1d90d3e45ce40b72e5e2d9"},
+ {file = "yarl-1.14.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a56fbe3d7f3bce1d060ea18d2413a2ca9ca814eea7cedc4d247b5f338d54844e"},
+ {file = "yarl-1.14.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7e2637d75e92763d1322cb5041573279ec43a80c0f7fbbd2d64f5aee98447b17"},
+ {file = "yarl-1.14.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9abe80ae2c9d37c17599557b712e6515f4100a80efb2cda15f5f070306477cd2"},
+ {file = "yarl-1.14.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:217a782020b875538eebf3948fac3a7f9bbbd0fd9bf8538f7c2ad7489e80f4e8"},
+ {file = "yarl-1.14.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9cfef3f14f75bf6aba73a76caf61f9d00865912a04a4393c468a7ce0981b519"},
+ {file = "yarl-1.14.0-cp313-cp313-win32.whl", hash = "sha256:d8361c7d04e6a264481f0b802e395f647cd3f8bbe27acfa7c12049efea675bd1"},
+ {file = "yarl-1.14.0-cp313-cp313-win_amd64.whl", hash = "sha256:bc24f968b82455f336b79bf37dbb243b7d76cd40897489888d663d4e028f5069"},
+ {file = "yarl-1.14.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:91d875f75fabf76b3018c5f196bf3d308ed2b49ddcb46c1576d6b075754a1393"},
+ {file = "yarl-1.14.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4009def9be3a7e5175db20aa2d7307ecd00bbf50f7f0f989300710eee1d0b0b9"},
+ {file = "yarl-1.14.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:582cedde49603f139be572252a318b30dc41039bc0b8165f070f279e5d12187f"},
+ {file = "yarl-1.14.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbd9ff43a04f8ffe8a959a944c2dca10d22f5f99fc6a459f49c3ebfb409309d9"},
+ {file = "yarl-1.14.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f805e37ed16cc212fdc538a608422d7517e7faf539bedea4fe69425bc55d76"},
+ {file = "yarl-1.14.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95e16e9eaa2d7f5d87421b8fe694dd71606aa61d74b824c8d17fc85cc51983d1"},
+ {file = "yarl-1.14.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:816d24f584edefcc5ca63428f0b38fee00b39fe64e3c5e558f895a18983efe96"},
+ {file = "yarl-1.14.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd2660c01367eb3ef081b8fa0a5da7fe767f9427aa82023a961a5f28f0d4af6c"},
+ {file = "yarl-1.14.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:94b2bb9bcfd5be9d27004ea4398fb640373dd0c1a9e219084f42c08f77a720ab"},
+ {file = "yarl-1.14.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c2089a9afef887664115f7fa6d3c0edd6454adaca5488dba836ca91f60401075"},
+ {file = "yarl-1.14.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2192f718db4a8509f63dd6d950f143279211fa7e6a2c612edc17d85bf043d36e"},
+ {file = "yarl-1.14.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:8385ab36bf812e9d37cf7613999a87715f27ef67a53f0687d28c44b819df7cb0"},
+ {file = "yarl-1.14.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:b4c1ecba93e7826dc71ddba75fb7740cdb52e7bd0be9f03136b83f54e6a1f511"},
+ {file = "yarl-1.14.0-cp38-cp38-win32.whl", hash = "sha256:e749af6c912a7bb441d105c50c1a3da720474e8acb91c89350080dd600228f0e"},
+ {file = "yarl-1.14.0-cp38-cp38-win_amd64.whl", hash = "sha256:147e36331f6f63e08a14640acf12369e041e0751bb70d9362df68c2d9dcf0c87"},
+ {file = "yarl-1.14.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a9f917966d27f7ce30039fe8d900f913c5304134096554fd9bea0774bcda6d1"},
+ {file = "yarl-1.14.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a2f8fb7f944bcdfecd4e8d855f84c703804a594da5123dd206f75036e536d4d"},
+ {file = "yarl-1.14.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f4e475f29a9122f908d0f1f706e1f2fc3656536ffd21014ff8a6f2e1b14d1d8"},
+ {file = "yarl-1.14.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8089d4634d8fa2b1806ce44fefa4979b1ab2c12c0bc7ef3dfa45c8a374811348"},
+ {file = "yarl-1.14.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b16f6c75cffc2dc0616ea295abb0e1967601bd1fb1e0af6a1de1c6c887f3439"},
+ {file = "yarl-1.14.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498b3c55087b9d762636bca9b45f60d37e51d24341786dc01b81253f9552a607"},
+ {file = "yarl-1.14.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3f8bfc1db82589ef965ed234b87de30d140db8b6dc50ada9e33951ccd8ec07a"},
+ {file = "yarl-1.14.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:625f207b1799e95e7c823f42f473c1e9dbfb6192bd56bba8695656d92be4535f"},
+ {file = "yarl-1.14.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:781e2495e408a81e4eaeedeb41ba32b63b1980dddf8b60dbbeff6036bcd35049"},
+ {file = "yarl-1.14.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:659603d26d40dd4463200df9bfbc339fbfaed3fe32e5c432fe1dc2b5d4aa94b4"},
+ {file = "yarl-1.14.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4e0d45ebf975634468682c8bec021618b3ad52c37619e5c938f8f831fa1ac5c0"},
+ {file = "yarl-1.14.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a2e4725a08cb2b4794db09e350c86dee18202bb8286527210e13a1514dc9a59a"},
+ {file = "yarl-1.14.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:19268b4fec1d7760134f2de46ef2608c2920134fb1fa61e451f679e41356dc55"},
+ {file = "yarl-1.14.0-cp39-cp39-win32.whl", hash = "sha256:337912bcdcf193ade64b9aae5a4017a0a1950caf8ca140362e361543c6773f21"},
+ {file = "yarl-1.14.0-cp39-cp39-win_amd64.whl", hash = "sha256:b6d0147574ce2e7b812c989e50fa72bbc5338045411a836bd066ce5fc8ac0bce"},
+ {file = "yarl-1.14.0-py3-none-any.whl", hash = "sha256:c8ed4034f0765f8861620c1f2f2364d2e58520ea288497084dae880424fc0d9f"},
+ {file = "yarl-1.14.0.tar.gz", hash = "sha256:88c7d9d58aab0724b979ab5617330acb1c7030b79379c8138c1c8c94e121d1b3"},
]
[package.dependencies]
idna = ">=2.0"
multidict = ">=4.0"
+propcache = ">=0.2.0"
[[package]]
name = "zarr"
-version = "2.18.2"
+version = "2.18.3"
description = "An implementation of chunked, compressed, N-dimensional arrays for Python"
optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.10"
files = [
- {file = "zarr-2.18.2-py3-none-any.whl", hash = "sha256:a638754902f97efa99b406083fdc807a0e2ccf12a949117389d2a4ba9b05df38"},
- {file = "zarr-2.18.2.tar.gz", hash = "sha256:9bb393b8a0a38fb121dbb913b047d75db28de9890f6d644a217a73cf4ae74f47"},
+ {file = "zarr-2.18.3-py3-none-any.whl", hash = "sha256:b1f7dfd2496f436745cdd4c7bcf8d3b4bc1dceef5fdd0d589c87130d842496dd"},
+ {file = "zarr-2.18.3.tar.gz", hash = "sha256:2580d8cb6dd84621771a10d31c4d777dca8a27706a1a89b29f42d2d37e2df5ce"},
]
[package.dependencies]
asciitree = "*"
fasteners = {version = "*", markers = "sys_platform != \"emscripten\""}
numcodecs = ">=0.10.0"
-numpy = ">=1.23"
+numpy = ">=1.24"
[package.extras]
docs = ["numcodecs[msgpack]", "numpydoc", "pydata-sphinx-theme", "sphinx", "sphinx-automodapi", "sphinx-copybutton", "sphinx-design", "sphinx-issues"]
@@ -4558,27 +7535,33 @@ jupyter = ["ipytree (>=0.2.2)", "ipywidgets (>=8.0.0)", "notebook"]
[[package]]
name = "zipp"
-version = "3.19.2"
+version = "3.20.2"
description = "Backport of pathlib-compatible object wrapper for zip files"
optional = false
python-versions = ">=3.8"
files = [
- {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"},
- {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"},
+ {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"},
+ {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"},
]
[package.extras]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+cover = ["pytest-cov"]
doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"]
+enabler = ["pytest-enabler (>=2.2)"]
+test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
+type = ["pytest-mypy"]
[extras]
aloha = ["gym-aloha"]
dev = ["debugpy", "pre-commit"]
dora = ["gym-dora"]
dynamixel = ["dynamixel-sdk", "pynput"]
+feetech = ["feetech-servo-sdk", "pynput"]
intelrealsense = ["pyrealsense2"]
pusht = ["gym-pusht"]
-test = ["pytest", "pytest-cov"]
+stretch = ["hello-robot-stretch-body", "pynput", "pyrealsense2", "pyrender"]
+test = ["pyserial", "pytest", "pytest-cov"]
umi = ["imagecodecs"]
video-benchmark = ["pandas", "scikit-image"]
xarm = ["gym-xarm"]
@@ -4586,4 +7569,4 @@ xarm = ["gym-xarm"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.10,<3.13"
-content-hash = "c9c3beac71f760738baf2fd169378eefdaef7d3a9cd068270bc5190fbefdb42a"
+content-hash = "41344f0eb2d06d9a378abcd10df8205aa3926ff0a08ac5ab1a0b1bcae7440fd8"
diff --git a/pyproject.toml b/pyproject.toml
index f46e39da1..59c2de8bc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -43,8 +43,8 @@ opencv-python = ">=4.9.0"
diffusers = ">=0.27.2"
torchvision = ">=0.17.1"
h5py = ">=3.10.0"
-huggingface-hub = {extras = ["hf-transfer", "cli"], version = ">=0.25.0"}
-gymnasium = ">=0.29.1"
+huggingface-hub = {extras = ["hf-transfer", "cli"], version = ">=0.25.2"}
+gymnasium = "==0.29.1" # TODO(rcadene, aliberts): Make gym 1.0.0 work
cmake = ">=3.29.0.1"
gym-dora = { git = "https://github.com/dora-rs/dora-lerobot.git", subdirectory = "gym_dora", optional = true }
gym-pusht = { version = ">=0.1.5", optional = true}
@@ -64,9 +64,13 @@ pandas = {version = ">=2.2.2", optional = true}
scikit-image = {version = ">=0.23.2", optional = true}
dynamixel-sdk = {version = ">=3.7.31", optional = true}
pynput = {version = ">=1.7.7", optional = true}
-# TODO(rcadene, salibert): 71.0.1 has a bug
-setuptools = {version = "!=71.0.1", optional = true}
-pyrealsense2 = {version = ">=2.55.1.6486", markers = "sys_platform != 'darwin'", optional = true}
+feetech-servo-sdk = {version = ">=1.0.0", optional = true}
+setuptools = {version = "!=71.0.1", optional = true} # TODO(rcadene, aliberts): 71.0.1 has a bug
+pyrealsense2 = {version = ">=2.55.1.6486", markers = "sys_platform != 'darwin'", optional = true} # TODO(rcadene, aliberts): Fix on Mac
+pyrender = {git = "https://github.com/mmatl/pyrender.git", markers = "sys_platform == 'linux'", optional = true}
+hello-robot-stretch-body = {version = ">=0.7.27", markers = "sys_platform == 'linux'", optional = true}
+pyserial = {version = ">=3.5", optional = true}
+jsonlines = ">=4.0.0"
[tool.poetry.extras]
@@ -75,11 +79,13 @@ pusht = ["gym-pusht"]
xarm = ["gym-xarm"]
aloha = ["gym-aloha"]
dev = ["pre-commit", "debugpy"]
-test = ["pytest", "pytest-cov"]
+test = ["pytest", "pytest-cov", "pyserial"]
umi = ["imagecodecs"]
video_benchmark = ["scikit-image", "pandas"]
dynamixel = ["dynamixel-sdk", "pynput"]
+feetech = ["feetech-servo-sdk", "pynput"]
intelrealsense = ["pyrealsense2"]
+stretch = ["hello-robot-stretch-body", "pyrender", "pyrealsense2", "pynput"]
[tool.ruff]
line-length = 110
diff --git a/tests/conftest.py b/tests/conftest.py
index 52006f331..2075c2aa6 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -13,13 +13,22 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+
import traceback
import pytest
+from serial import SerialException
+from lerobot import available_cameras, available_motors, available_robots
from lerobot.common.utils.utils import init_hydra_config
+from tests.utils import DEVICE, ROBOT_CONFIG_PATH_TEMPLATE, make_camera, make_motors_bus
-from .utils import DEVICE, ROBOT_CONFIG_PATH_TEMPLATE
+# Import fixture modules as plugins
+pytest_plugins = [
+ "tests.fixtures.dataset_factories",
+ "tests.fixtures.files",
+ "tests.fixtures.hub",
+]
def pytest_collection_finish():
@@ -28,6 +37,11 @@ def pytest_collection_finish():
@pytest.fixture
def is_robot_available(robot_type):
+ if robot_type not in available_robots:
+ raise ValueError(
+ f"The robot type '{robot_type}' is not valid. Expected one of these '{available_robots}"
+ )
+
try:
from lerobot.common.robot_devices.robots.factory import make_robot
@@ -37,7 +51,76 @@ def is_robot_available(robot_type):
robot.connect()
del robot
return True
- except Exception:
- traceback.print_exc()
+
+ except Exception as e:
print(f"\nA {robot_type} robot is not available.")
+
+ if isinstance(e, ModuleNotFoundError):
+ print(f"\nInstall module '{e.name}'")
+ elif isinstance(e, SerialException):
+ print("\nNo physical motors bus detected.")
+ else:
+ traceback.print_exc()
+
+ return False
+
+
+@pytest.fixture
+def is_camera_available(camera_type):
+ if camera_type not in available_cameras:
+ raise ValueError(
+ f"The camera type '{camera_type}' is not valid. Expected one of these '{available_cameras}"
+ )
+
+ try:
+ camera = make_camera(camera_type)
+ camera.connect()
+ del camera
+ return True
+
+ except Exception as e:
+ print(f"\nA {camera_type} camera is not available.")
+
+ if isinstance(e, ModuleNotFoundError):
+ print(f"\nInstall module '{e.name}'")
+ elif isinstance(e, ValueError) and "camera_index" in e.args[0]:
+ print("\nNo physical camera detected.")
+ else:
+ traceback.print_exc()
+
return False
+
+
+@pytest.fixture
+def is_motor_available(motor_type):
+ if motor_type not in available_motors:
+ raise ValueError(
+ f"The motor type '{motor_type}' is not valid. Expected one of these '{available_motors}"
+ )
+
+ try:
+ motors_bus = make_motors_bus(motor_type)
+ motors_bus.connect()
+ del motors_bus
+ return True
+
+ except Exception as e:
+ print(f"\nA {motor_type} motor is not available.")
+
+ if isinstance(e, ModuleNotFoundError):
+ print(f"\nInstall module '{e.name}'")
+ elif isinstance(e, SerialException):
+ print("\nNo physical motors bus detected.")
+ else:
+ traceback.print_exc()
+
+ return False
+
+
+@pytest.fixture
+def patch_builtins_input(monkeypatch):
+ def print_text(text=None):
+ if text is not None:
+ print(text)
+
+ monkeypatch.setattr("builtins.input", print_text)
diff --git a/tests/data/save_policy_to_safetensors/dora_aloha_real_act_real/actions.safetensors b/tests/data/save_policy_to_safetensors/dora_aloha_real_act_aloha_real/actions.safetensors
similarity index 100%
rename from tests/data/save_policy_to_safetensors/dora_aloha_real_act_real/actions.safetensors
rename to tests/data/save_policy_to_safetensors/dora_aloha_real_act_aloha_real/actions.safetensors
diff --git a/tests/data/save_policy_to_safetensors/dora_aloha_real_act_real/grad_stats.safetensors b/tests/data/save_policy_to_safetensors/dora_aloha_real_act_aloha_real/grad_stats.safetensors
similarity index 100%
rename from tests/data/save_policy_to_safetensors/dora_aloha_real_act_real/grad_stats.safetensors
rename to tests/data/save_policy_to_safetensors/dora_aloha_real_act_aloha_real/grad_stats.safetensors
diff --git a/tests/data/save_policy_to_safetensors/dora_aloha_real_act_real/output_dict.safetensors b/tests/data/save_policy_to_safetensors/dora_aloha_real_act_aloha_real/output_dict.safetensors
similarity index 100%
rename from tests/data/save_policy_to_safetensors/dora_aloha_real_act_real/output_dict.safetensors
rename to tests/data/save_policy_to_safetensors/dora_aloha_real_act_aloha_real/output_dict.safetensors
diff --git a/tests/data/save_policy_to_safetensors/dora_aloha_real_act_real/param_stats.safetensors b/tests/data/save_policy_to_safetensors/dora_aloha_real_act_aloha_real/param_stats.safetensors
similarity index 100%
rename from tests/data/save_policy_to_safetensors/dora_aloha_real_act_real/param_stats.safetensors
rename to tests/data/save_policy_to_safetensors/dora_aloha_real_act_aloha_real/param_stats.safetensors
diff --git a/tests/data/save_policy_to_safetensors/dora_aloha_real_act_real_no_state/actions.safetensors b/tests/data/save_policy_to_safetensors/dora_aloha_real_act_real_no_state/actions.safetensors
deleted file mode 100644
index 2e26ef270..000000000
--- a/tests/data/save_policy_to_safetensors/dora_aloha_real_act_real_no_state/actions.safetensors
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:b5a9f73a2356aff9c717cdfd0d37a6da08b0cf2cc09c98edbc9492501b7f64a5
-size 5104
diff --git a/tests/data/save_policy_to_safetensors/dora_aloha_real_act_real_no_state/grad_stats.safetensors b/tests/data/save_policy_to_safetensors/dora_aloha_real_act_real_no_state/grad_stats.safetensors
deleted file mode 100644
index b959bc6e0..000000000
--- a/tests/data/save_policy_to_safetensors/dora_aloha_real_act_real_no_state/grad_stats.safetensors
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:28738b3cfad17af0ac5181effdd796acdf7953cd5bcca3f421a11ddfd6b0076f
-size 30800
diff --git a/tests/data/save_policy_to_safetensors/dora_aloha_real_act_real_no_state/output_dict.safetensors b/tests/data/save_policy_to_safetensors/dora_aloha_real_act_real_no_state/output_dict.safetensors
deleted file mode 100644
index 455834aa8..000000000
--- a/tests/data/save_policy_to_safetensors/dora_aloha_real_act_real_no_state/output_dict.safetensors
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:4bb8a197a40456fdbc16029126268e6bcef3eca1837d88235165dc7e14618bea
-size 68
diff --git a/tests/data/save_policy_to_safetensors/dora_aloha_real_act_real_no_state/param_stats.safetensors b/tests/data/save_policy_to_safetensors/dora_aloha_real_act_real_no_state/param_stats.safetensors
deleted file mode 100644
index d50fb31d9..000000000
--- a/tests/data/save_policy_to_safetensors/dora_aloha_real_act_real_no_state/param_stats.safetensors
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:bea60cce42d324f539dd3bca1e66b5ba6391838fdcadb00efc25f3240edb529a
-size 33600
diff --git a/tests/fixtures/constants.py b/tests/fixtures/constants.py
new file mode 100644
index 000000000..bfe6c339b
--- /dev/null
+++ b/tests/fixtures/constants.py
@@ -0,0 +1,29 @@
+from lerobot.common.datasets.lerobot_dataset import LEROBOT_HOME
+
+LEROBOT_TEST_DIR = LEROBOT_HOME / "_testing"
+DUMMY_REPO_ID = "dummy/repo"
+DUMMY_ROBOT_TYPE = "dummy_robot"
+DUMMY_MOTOR_FEATURES = {
+ "action": {
+ "dtype": "float32",
+ "shape": (6,),
+ "names": ["shoulder_pan", "shoulder_lift", "elbow_flex", "wrist_flex", "wrist_roll", "gripper"],
+ },
+ "state": {
+ "dtype": "float32",
+ "shape": (6,),
+ "names": ["shoulder_pan", "shoulder_lift", "elbow_flex", "wrist_flex", "wrist_roll", "gripper"],
+ },
+}
+DUMMY_CAMERA_FEATURES = {
+ "laptop": {"shape": (480, 640, 3), "names": ["height", "width", "channels"], "info": None},
+ "phone": {"shape": (480, 640, 3), "names": ["height", "width", "channels"], "info": None},
+}
+DEFAULT_FPS = 30
+DUMMY_VIDEO_INFO = {
+ "video.fps": DEFAULT_FPS,
+ "video.codec": "av1",
+ "video.pix_fmt": "yuv420p",
+ "video.is_depth_map": False,
+ "has_audio": False,
+}
diff --git a/tests/fixtures/dataset_factories.py b/tests/fixtures/dataset_factories.py
new file mode 100644
index 000000000..c28a11653
--- /dev/null
+++ b/tests/fixtures/dataset_factories.py
@@ -0,0 +1,396 @@
+import random
+from pathlib import Path
+from unittest.mock import patch
+
+import datasets
+import numpy as np
+import PIL.Image
+import pytest
+import torch
+
+from lerobot.common.datasets.lerobot_dataset import CODEBASE_VERSION, LeRobotDataset, LeRobotDatasetMetadata
+from lerobot.common.datasets.utils import (
+ DEFAULT_CHUNK_SIZE,
+ DEFAULT_FEATURES,
+ DEFAULT_PARQUET_PATH,
+ DEFAULT_VIDEO_PATH,
+ get_hf_features_from_features,
+ hf_transform_to_torch,
+)
+from tests.fixtures.constants import (
+ DEFAULT_FPS,
+ DUMMY_CAMERA_FEATURES,
+ DUMMY_MOTOR_FEATURES,
+ DUMMY_REPO_ID,
+ DUMMY_ROBOT_TYPE,
+ DUMMY_VIDEO_INFO,
+)
+
+
+def get_task_index(task_dicts: dict, task: str) -> int:
+ tasks = {d["task_index"]: d["task"] for d in task_dicts}
+ task_to_task_index = {task: task_idx for task_idx, task in tasks.items()}
+ return task_to_task_index[task]
+
+
+@pytest.fixture(scope="session")
+def img_tensor_factory():
+ def _create_img_tensor(height=100, width=100, channels=3, dtype=torch.float32) -> torch.Tensor:
+ return torch.rand((channels, height, width), dtype=dtype)
+
+ return _create_img_tensor
+
+
+@pytest.fixture(scope="session")
+def img_array_factory():
+ def _create_img_array(height=100, width=100, channels=3, dtype=np.uint8) -> np.ndarray:
+ if np.issubdtype(dtype, np.unsignedinteger):
+ # Int array in [0, 255] range
+ img_array = np.random.randint(0, 256, size=(height, width, channels), dtype=dtype)
+ elif np.issubdtype(dtype, np.floating):
+ # Float array in [0, 1] range
+ img_array = np.random.rand(height, width, channels).astype(dtype)
+ else:
+ raise ValueError(dtype)
+ return img_array
+
+ return _create_img_array
+
+
+@pytest.fixture(scope="session")
+def img_factory(img_array_factory):
+ def _create_img(height=100, width=100) -> PIL.Image.Image:
+ img_array = img_array_factory(height=height, width=width)
+ return PIL.Image.fromarray(img_array)
+
+ return _create_img
+
+
+@pytest.fixture(scope="session")
+def features_factory():
+ def _create_features(
+ motor_features: dict = DUMMY_MOTOR_FEATURES,
+ camera_features: dict = DUMMY_CAMERA_FEATURES,
+ use_videos: bool = True,
+ ) -> dict:
+ if use_videos:
+ camera_ft = {
+ key: {"dtype": "video", **ft, **DUMMY_VIDEO_INFO} for key, ft in camera_features.items()
+ }
+ else:
+ camera_ft = {key: {"dtype": "image", **ft} for key, ft in camera_features.items()}
+ return {
+ **motor_features,
+ **camera_ft,
+ **DEFAULT_FEATURES,
+ }
+
+ return _create_features
+
+
+@pytest.fixture(scope="session")
+def info_factory(features_factory):
+ def _create_info(
+ codebase_version: str = CODEBASE_VERSION,
+ fps: int = DEFAULT_FPS,
+ robot_type: str = DUMMY_ROBOT_TYPE,
+ total_episodes: int = 0,
+ total_frames: int = 0,
+ total_tasks: int = 0,
+ total_videos: int = 0,
+ total_chunks: int = 0,
+ chunks_size: int = DEFAULT_CHUNK_SIZE,
+ data_path: str = DEFAULT_PARQUET_PATH,
+ video_path: str = DEFAULT_VIDEO_PATH,
+ motor_features: dict = DUMMY_MOTOR_FEATURES,
+ camera_features: dict = DUMMY_CAMERA_FEATURES,
+ use_videos: bool = True,
+ ) -> dict:
+ features = features_factory(motor_features, camera_features, use_videos)
+ return {
+ "codebase_version": codebase_version,
+ "robot_type": robot_type,
+ "total_episodes": total_episodes,
+ "total_frames": total_frames,
+ "total_tasks": total_tasks,
+ "total_videos": total_videos,
+ "total_chunks": total_chunks,
+ "chunks_size": chunks_size,
+ "fps": fps,
+ "splits": {},
+ "data_path": data_path,
+ "video_path": video_path if use_videos else None,
+ "features": features,
+ }
+
+ return _create_info
+
+
+@pytest.fixture(scope="session")
+def stats_factory():
+ def _create_stats(
+ features: dict[str] | None = None,
+ ) -> dict:
+ stats = {}
+ for key, ft in features.items():
+ shape = ft["shape"]
+ dtype = ft["dtype"]
+ if dtype in ["image", "video"]:
+ stats[key] = {
+ "max": np.full((3, 1, 1), 1, dtype=np.float32).tolist(),
+ "mean": np.full((3, 1, 1), 0.5, dtype=np.float32).tolist(),
+ "min": np.full((3, 1, 1), 0, dtype=np.float32).tolist(),
+ "std": np.full((3, 1, 1), 0.25, dtype=np.float32).tolist(),
+ }
+ else:
+ stats[key] = {
+ "max": np.full(shape, 1, dtype=dtype).tolist(),
+ "mean": np.full(shape, 0.5, dtype=dtype).tolist(),
+ "min": np.full(shape, 0, dtype=dtype).tolist(),
+ "std": np.full(shape, 0.25, dtype=dtype).tolist(),
+ }
+ return stats
+
+ return _create_stats
+
+
+@pytest.fixture(scope="session")
+def tasks_factory():
+ def _create_tasks(total_tasks: int = 3) -> int:
+ tasks_list = []
+ for i in range(total_tasks):
+ task_dict = {"task_index": i, "task": f"Perform action {i}."}
+ tasks_list.append(task_dict)
+ return tasks_list
+
+ return _create_tasks
+
+
+@pytest.fixture(scope="session")
+def episodes_factory(tasks_factory):
+ def _create_episodes(
+ total_episodes: int = 3,
+ total_frames: int = 400,
+ tasks: dict | None = None,
+ multi_task: bool = False,
+ ):
+ if total_episodes <= 0 or total_frames <= 0:
+ raise ValueError("num_episodes and total_length must be positive integers.")
+ if total_frames < total_episodes:
+ raise ValueError("total_length must be greater than or equal to num_episodes.")
+
+ if not tasks:
+ min_tasks = 2 if multi_task else 1
+ total_tasks = random.randint(min_tasks, total_episodes)
+ tasks = tasks_factory(total_tasks)
+
+ if total_episodes < len(tasks) and not multi_task:
+ raise ValueError("The number of tasks should be less than the number of episodes.")
+
+ # Generate random lengths that sum up to total_length
+ lengths = np.random.multinomial(total_frames, [1 / total_episodes] * total_episodes).tolist()
+
+ tasks_list = [task_dict["task"] for task_dict in tasks]
+ num_tasks_available = len(tasks_list)
+
+ episodes_list = []
+ remaining_tasks = tasks_list.copy()
+ for ep_idx in range(total_episodes):
+ num_tasks_in_episode = random.randint(1, min(3, num_tasks_available)) if multi_task else 1
+ tasks_to_sample = remaining_tasks if remaining_tasks else tasks_list
+ episode_tasks = random.sample(tasks_to_sample, min(num_tasks_in_episode, len(tasks_to_sample)))
+ if remaining_tasks:
+ for task in episode_tasks:
+ remaining_tasks.remove(task)
+
+ episodes_list.append(
+ {
+ "episode_index": ep_idx,
+ "tasks": episode_tasks,
+ "length": lengths[ep_idx],
+ }
+ )
+
+ return episodes_list
+
+ return _create_episodes
+
+
+@pytest.fixture(scope="session")
+def hf_dataset_factory(features_factory, tasks_factory, episodes_factory, img_array_factory):
+ def _create_hf_dataset(
+ features: dict | None = None,
+ tasks: list[dict] | None = None,
+ episodes: list[dict] | None = None,
+ fps: int = DEFAULT_FPS,
+ ) -> datasets.Dataset:
+ if not tasks:
+ tasks = tasks_factory()
+ if not episodes:
+ episodes = episodes_factory()
+ if not features:
+ features = features_factory()
+
+ timestamp_col = np.array([], dtype=np.float32)
+ frame_index_col = np.array([], dtype=np.int64)
+ episode_index_col = np.array([], dtype=np.int64)
+ task_index = np.array([], dtype=np.int64)
+ for ep_dict in episodes:
+ timestamp_col = np.concatenate((timestamp_col, np.arange(ep_dict["length"]) / fps))
+ frame_index_col = np.concatenate((frame_index_col, np.arange(ep_dict["length"], dtype=int)))
+ episode_index_col = np.concatenate(
+ (episode_index_col, np.full(ep_dict["length"], ep_dict["episode_index"], dtype=int))
+ )
+ ep_task_index = get_task_index(tasks, ep_dict["tasks"][0])
+ task_index = np.concatenate((task_index, np.full(ep_dict["length"], ep_task_index, dtype=int)))
+
+ index_col = np.arange(len(episode_index_col))
+
+ robot_cols = {}
+ for key, ft in features.items():
+ if ft["dtype"] == "image":
+ robot_cols[key] = [
+ img_array_factory(height=ft["shapes"][1], width=ft["shapes"][0])
+ for _ in range(len(index_col))
+ ]
+ elif ft["shape"][0] > 1 and ft["dtype"] != "video":
+ robot_cols[key] = np.random.random((len(index_col), ft["shape"][0])).astype(ft["dtype"])
+
+ hf_features = get_hf_features_from_features(features)
+ dataset = datasets.Dataset.from_dict(
+ {
+ **robot_cols,
+ "timestamp": timestamp_col,
+ "frame_index": frame_index_col,
+ "episode_index": episode_index_col,
+ "index": index_col,
+ "task_index": task_index,
+ },
+ features=hf_features,
+ )
+ dataset.set_transform(hf_transform_to_torch)
+ return dataset
+
+ return _create_hf_dataset
+
+
+@pytest.fixture(scope="session")
+def lerobot_dataset_metadata_factory(
+ info_factory,
+ stats_factory,
+ tasks_factory,
+ episodes_factory,
+ mock_snapshot_download_factory,
+):
+ def _create_lerobot_dataset_metadata(
+ root: Path,
+ repo_id: str = DUMMY_REPO_ID,
+ info: dict | None = None,
+ stats: dict | None = None,
+ tasks: list[dict] | None = None,
+ episodes: list[dict] | None = None,
+ local_files_only: bool = False,
+ ) -> LeRobotDatasetMetadata:
+ if not info:
+ info = info_factory()
+ if not stats:
+ stats = stats_factory(features=info["features"])
+ if not tasks:
+ tasks = tasks_factory(total_tasks=info["total_tasks"])
+ if not episodes:
+ episodes = episodes_factory(
+ total_episodes=info["total_episodes"], total_frames=info["total_frames"], tasks=tasks
+ )
+
+ mock_snapshot_download = mock_snapshot_download_factory(
+ info=info,
+ stats=stats,
+ tasks=tasks,
+ episodes=episodes,
+ )
+ with (
+ patch(
+ "lerobot.common.datasets.lerobot_dataset.get_hub_safe_version"
+ ) as mock_get_hub_safe_version_patch,
+ patch(
+ "lerobot.common.datasets.lerobot_dataset.snapshot_download"
+ ) as mock_snapshot_download_patch,
+ ):
+ mock_get_hub_safe_version_patch.side_effect = lambda repo_id, version: version
+ mock_snapshot_download_patch.side_effect = mock_snapshot_download
+
+ return LeRobotDatasetMetadata(repo_id=repo_id, root=root, local_files_only=local_files_only)
+
+ return _create_lerobot_dataset_metadata
+
+
+@pytest.fixture(scope="session")
+def lerobot_dataset_factory(
+ info_factory,
+ stats_factory,
+ tasks_factory,
+ episodes_factory,
+ hf_dataset_factory,
+ mock_snapshot_download_factory,
+ lerobot_dataset_metadata_factory,
+):
+ def _create_lerobot_dataset(
+ root: Path,
+ repo_id: str = DUMMY_REPO_ID,
+ total_episodes: int = 3,
+ total_frames: int = 150,
+ total_tasks: int = 1,
+ multi_task: bool = False,
+ info: dict | None = None,
+ stats: dict | None = None,
+ tasks: list[dict] | None = None,
+ episode_dicts: list[dict] | None = None,
+ hf_dataset: datasets.Dataset | None = None,
+ **kwargs,
+ ) -> LeRobotDataset:
+ if not info:
+ info = info_factory(
+ total_episodes=total_episodes, total_frames=total_frames, total_tasks=total_tasks
+ )
+ if not stats:
+ stats = stats_factory(features=info["features"])
+ if not tasks:
+ tasks = tasks_factory(total_tasks=info["total_tasks"])
+ if not episode_dicts:
+ episode_dicts = episodes_factory(
+ total_episodes=info["total_episodes"],
+ total_frames=info["total_frames"],
+ tasks=tasks,
+ multi_task=multi_task,
+ )
+ if not hf_dataset:
+ hf_dataset = hf_dataset_factory(tasks=tasks, episodes=episode_dicts, fps=info["fps"])
+
+ mock_snapshot_download = mock_snapshot_download_factory(
+ info=info,
+ stats=stats,
+ tasks=tasks,
+ episodes=episode_dicts,
+ hf_dataset=hf_dataset,
+ )
+ mock_metadata = lerobot_dataset_metadata_factory(
+ root=root,
+ repo_id=repo_id,
+ info=info,
+ stats=stats,
+ tasks=tasks,
+ episodes=episode_dicts,
+ local_files_only=kwargs.get("local_files_only", False),
+ )
+ with (
+ patch("lerobot.common.datasets.lerobot_dataset.LeRobotDatasetMetadata") as mock_metadata_patch,
+ patch(
+ "lerobot.common.datasets.lerobot_dataset.snapshot_download"
+ ) as mock_snapshot_download_patch,
+ ):
+ mock_metadata_patch.return_value = mock_metadata
+ mock_snapshot_download_patch.side_effect = mock_snapshot_download
+
+ return LeRobotDataset(repo_id=repo_id, root=root, **kwargs)
+
+ return _create_lerobot_dataset
diff --git a/tests/fixtures/files.py b/tests/fixtures/files.py
new file mode 100644
index 000000000..5fe8a3148
--- /dev/null
+++ b/tests/fixtures/files.py
@@ -0,0 +1,114 @@
+import json
+from pathlib import Path
+
+import datasets
+import jsonlines
+import pyarrow.compute as pc
+import pyarrow.parquet as pq
+import pytest
+
+from lerobot.common.datasets.utils import EPISODES_PATH, INFO_PATH, STATS_PATH, TASKS_PATH
+
+
+@pytest.fixture(scope="session")
+def info_path(info_factory):
+ def _create_info_json_file(dir: Path, info: dict | None = None) -> Path:
+ if not info:
+ info = info_factory()
+ fpath = dir / INFO_PATH
+ fpath.parent.mkdir(parents=True, exist_ok=True)
+ with open(fpath, "w") as f:
+ json.dump(info, f, indent=4, ensure_ascii=False)
+ return fpath
+
+ return _create_info_json_file
+
+
+@pytest.fixture(scope="session")
+def stats_path(stats_factory):
+ def _create_stats_json_file(dir: Path, stats: dict | None = None) -> Path:
+ if not stats:
+ stats = stats_factory()
+ fpath = dir / STATS_PATH
+ fpath.parent.mkdir(parents=True, exist_ok=True)
+ with open(fpath, "w") as f:
+ json.dump(stats, f, indent=4, ensure_ascii=False)
+ return fpath
+
+ return _create_stats_json_file
+
+
+@pytest.fixture(scope="session")
+def tasks_path(tasks_factory):
+ def _create_tasks_jsonl_file(dir: Path, tasks: list | None = None) -> Path:
+ if not tasks:
+ tasks = tasks_factory()
+ fpath = dir / TASKS_PATH
+ fpath.parent.mkdir(parents=True, exist_ok=True)
+ with jsonlines.open(fpath, "w") as writer:
+ writer.write_all(tasks)
+ return fpath
+
+ return _create_tasks_jsonl_file
+
+
+@pytest.fixture(scope="session")
+def episode_path(episodes_factory):
+ def _create_episodes_jsonl_file(dir: Path, episodes: list | None = None) -> Path:
+ if not episodes:
+ episodes = episodes_factory()
+ fpath = dir / EPISODES_PATH
+ fpath.parent.mkdir(parents=True, exist_ok=True)
+ with jsonlines.open(fpath, "w") as writer:
+ writer.write_all(episodes)
+ return fpath
+
+ return _create_episodes_jsonl_file
+
+
+@pytest.fixture(scope="session")
+def single_episode_parquet_path(hf_dataset_factory, info_factory):
+ def _create_single_episode_parquet(
+ dir: Path, ep_idx: int = 0, hf_dataset: datasets.Dataset | None = None, info: dict | None = None
+ ) -> Path:
+ if not info:
+ info = info_factory()
+ if hf_dataset is None:
+ hf_dataset = hf_dataset_factory()
+
+ data_path = info["data_path"]
+ chunks_size = info["chunks_size"]
+ ep_chunk = ep_idx // chunks_size
+ fpath = dir / data_path.format(episode_chunk=ep_chunk, episode_index=ep_idx)
+ fpath.parent.mkdir(parents=True, exist_ok=True)
+ table = hf_dataset.data.table
+ ep_table = table.filter(pc.equal(table["episode_index"], ep_idx))
+ pq.write_table(ep_table, fpath)
+ return fpath
+
+ return _create_single_episode_parquet
+
+
+@pytest.fixture(scope="session")
+def multi_episode_parquet_path(hf_dataset_factory, info_factory):
+ def _create_multi_episode_parquet(
+ dir: Path, hf_dataset: datasets.Dataset | None = None, info: dict | None = None
+ ) -> Path:
+ if not info:
+ info = info_factory()
+ if hf_dataset is None:
+ hf_dataset = hf_dataset_factory()
+
+ data_path = info["data_path"]
+ chunks_size = info["chunks_size"]
+ total_episodes = info["total_episodes"]
+ for ep_idx in range(total_episodes):
+ ep_chunk = ep_idx // chunks_size
+ fpath = dir / data_path.format(episode_chunk=ep_chunk, episode_index=ep_idx)
+ fpath.parent.mkdir(parents=True, exist_ok=True)
+ table = hf_dataset.data.table
+ ep_table = table.filter(pc.equal(table["episode_index"], ep_idx))
+ pq.write_table(ep_table, fpath)
+ return dir / "data"
+
+ return _create_multi_episode_parquet
diff --git a/tests/fixtures/hub.py b/tests/fixtures/hub.py
new file mode 100644
index 000000000..351768c04
--- /dev/null
+++ b/tests/fixtures/hub.py
@@ -0,0 +1,105 @@
+from pathlib import Path
+
+import datasets
+import pytest
+from huggingface_hub.utils import filter_repo_objects
+
+from lerobot.common.datasets.utils import EPISODES_PATH, INFO_PATH, STATS_PATH, TASKS_PATH
+from tests.fixtures.constants import LEROBOT_TEST_DIR
+
+
+@pytest.fixture(scope="session")
+def mock_snapshot_download_factory(
+ info_factory,
+ info_path,
+ stats_factory,
+ stats_path,
+ tasks_factory,
+ tasks_path,
+ episodes_factory,
+ episode_path,
+ single_episode_parquet_path,
+ hf_dataset_factory,
+):
+ """
+ This factory allows to patch snapshot_download such that when called, it will create expected files rather
+ than making calls to the hub api. Its design allows to pass explicitly files which you want to be created.
+ """
+
+ def _mock_snapshot_download_func(
+ info: dict | None = None,
+ stats: dict | None = None,
+ tasks: list[dict] | None = None,
+ episodes: list[dict] | None = None,
+ hf_dataset: datasets.Dataset | None = None,
+ ):
+ if not info:
+ info = info_factory()
+ if not stats:
+ stats = stats_factory(features=info["features"])
+ if not tasks:
+ tasks = tasks_factory(total_tasks=info["total_tasks"])
+ if not episodes:
+ episodes = episodes_factory(
+ total_episodes=info["total_episodes"], total_frames=info["total_frames"], tasks=tasks
+ )
+ if not hf_dataset:
+ hf_dataset = hf_dataset_factory(tasks=tasks, episodes=episodes, fps=info["fps"])
+
+ def _extract_episode_index_from_path(fpath: str) -> int:
+ path = Path(fpath)
+ if path.suffix == ".parquet" and path.stem.startswith("episode_"):
+ episode_index = int(path.stem[len("episode_") :]) # 'episode_000000' -> 0
+ return episode_index
+ else:
+ return None
+
+ def _mock_snapshot_download(
+ repo_id: str,
+ local_dir: str | Path | None = None,
+ allow_patterns: str | list[str] | None = None,
+ ignore_patterns: str | list[str] | None = None,
+ *args,
+ **kwargs,
+ ) -> str:
+ if not local_dir:
+ local_dir = LEROBOT_TEST_DIR
+
+ # List all possible files
+ all_files = []
+ meta_files = [INFO_PATH, STATS_PATH, TASKS_PATH, EPISODES_PATH]
+ all_files.extend(meta_files)
+
+ data_files = []
+ for episode_dict in episodes:
+ ep_idx = episode_dict["episode_index"]
+ ep_chunk = ep_idx // info["chunks_size"]
+ data_path = info["data_path"].format(episode_chunk=ep_chunk, episode_index=ep_idx)
+ data_files.append(data_path)
+ all_files.extend(data_files)
+
+ allowed_files = filter_repo_objects(
+ all_files, allow_patterns=allow_patterns, ignore_patterns=ignore_patterns
+ )
+
+ # Create allowed files
+ for rel_path in allowed_files:
+ if rel_path.startswith("data/"):
+ episode_index = _extract_episode_index_from_path(rel_path)
+ if episode_index is not None:
+ _ = single_episode_parquet_path(local_dir, episode_index, hf_dataset, info)
+ if rel_path == INFO_PATH:
+ _ = info_path(local_dir, info)
+ elif rel_path == STATS_PATH:
+ _ = stats_path(local_dir, stats)
+ elif rel_path == TASKS_PATH:
+ _ = tasks_path(local_dir, tasks)
+ elif rel_path == EPISODES_PATH:
+ _ = episode_path(local_dir, episodes)
+ else:
+ pass
+ return str(local_dir)
+
+ return _mock_snapshot_download
+
+ return _mock_snapshot_download_func
diff --git a/tests/mock_cv2.py b/tests/mock_cv2.py
new file mode 100644
index 000000000..3f3f9e343
--- /dev/null
+++ b/tests/mock_cv2.py
@@ -0,0 +1,83 @@
+from functools import cache
+
+import numpy as np
+
+CAP_PROP_FPS = 5
+CAP_PROP_FRAME_WIDTH = 3
+CAP_PROP_FRAME_HEIGHT = 4
+COLOR_RGB2BGR = 4
+COLOR_BGR2RGB = 4
+
+ROTATE_90_COUNTERCLOCKWISE = 2
+ROTATE_90_CLOCKWISE = 0
+ROTATE_180 = 1
+
+
+@cache
+def _generate_image(width: int, height: int):
+ return np.random.randint(0, 256, size=(height, width, 3), dtype=np.uint8)
+
+
+def cvtColor(color_image, color_convertion): # noqa: N802
+ if color_convertion in [COLOR_RGB2BGR, COLOR_BGR2RGB]:
+ return color_image[:, :, [2, 1, 0]]
+ else:
+ raise NotImplementedError(color_convertion)
+
+
+def rotate(color_image, rotation):
+ if rotation is None:
+ return color_image
+ elif rotation == ROTATE_90_CLOCKWISE:
+ return np.rot90(color_image, k=1)
+ elif rotation == ROTATE_180:
+ return np.rot90(color_image, k=2)
+ elif rotation == ROTATE_90_COUNTERCLOCKWISE:
+ return np.rot90(color_image, k=3)
+ else:
+ raise NotImplementedError(rotation)
+
+
+class VideoCapture:
+ def __init__(self, *args, **kwargs):
+ self._mock_dict = {
+ CAP_PROP_FPS: 30,
+ CAP_PROP_FRAME_WIDTH: 640,
+ CAP_PROP_FRAME_HEIGHT: 480,
+ }
+ self._is_opened = True
+
+ def isOpened(self): # noqa: N802
+ return self._is_opened
+
+ def set(self, propId: int, value: float) -> bool: # noqa: N803
+ if not self._is_opened:
+ raise RuntimeError("Camera is not opened")
+ self._mock_dict[propId] = value
+ return True
+
+ def get(self, propId: int) -> float: # noqa: N803
+ if not self._is_opened:
+ raise RuntimeError("Camera is not opened")
+ value = self._mock_dict[propId]
+ if value == 0:
+ if propId == CAP_PROP_FRAME_HEIGHT:
+ value = 480
+ elif propId == CAP_PROP_FRAME_WIDTH:
+ value = 640
+ return value
+
+ def read(self):
+ if not self._is_opened:
+ raise RuntimeError("Camera is not opened")
+ h = self.get(CAP_PROP_FRAME_HEIGHT)
+ w = self.get(CAP_PROP_FRAME_WIDTH)
+ ret = True
+ return ret, _generate_image(width=w, height=h)
+
+ def release(self):
+ self._is_opened = False
+
+ def __del__(self):
+ if self._is_opened:
+ self.release()
diff --git a/tests/mock_dynamixel_sdk.py b/tests/mock_dynamixel_sdk.py
new file mode 100644
index 000000000..a790dff05
--- /dev/null
+++ b/tests/mock_dynamixel_sdk.py
@@ -0,0 +1,94 @@
+"""Mocked classes and functions from dynamixel_sdk to allow for continuous integration
+and testing code logic that requires hardware and devices (e.g. robot arms, cameras)
+
+Warning: These mocked versions are minimalist. They do not exactly mock every behaviors
+from the original classes and functions (e.g. return types might be None instead of boolean).
+"""
+
+# from dynamixel_sdk import COMM_SUCCESS
+
+DEFAULT_BAUDRATE = 9_600
+COMM_SUCCESS = 0 # tx or rx packet communication success
+
+
+def convert_to_bytes(value, bytes):
+ # TODO(rcadene): remove need to mock `convert_to_bytes` by implemented the inverse transform
+ # `convert_bytes_to_value`
+ del bytes # unused
+ return value
+
+
+def get_default_motor_values(motor_index):
+ return {
+ # Key (int) are from X_SERIES_CONTROL_TABLE
+ 7: motor_index, # ID
+ 8: DEFAULT_BAUDRATE, # Baud_rate
+ 10: 0, # Drive_Mode
+ 64: 0, # Torque_Enable
+ # Set 2560 since calibration values for Aloha gripper is between start_pos=2499 and end_pos=3144
+ # For other joints, 2560 will be autocorrected to be in calibration range
+ 132: 2560, # Present_Position
+ }
+
+
+class PortHandler:
+ def __init__(self, port):
+ self.port = port
+ # factory default baudrate
+ self.baudrate = DEFAULT_BAUDRATE
+
+ def openPort(self): # noqa: N802
+ return True
+
+ def closePort(self): # noqa: N802
+ pass
+
+ def setPacketTimeoutMillis(self, timeout_ms): # noqa: N802
+ del timeout_ms # unused
+
+ def getBaudRate(self): # noqa: N802
+ return self.baudrate
+
+ def setBaudRate(self, baudrate): # noqa: N802
+ self.baudrate = baudrate
+
+
+class PacketHandler:
+ def __init__(self, protocol_version):
+ del protocol_version # unused
+ # Use packet_handler.data to communicate across Read and Write
+ self.data = {}
+
+
+class GroupSyncRead:
+ def __init__(self, port_handler, packet_handler, address, bytes):
+ self.packet_handler = packet_handler
+
+ def addParam(self, motor_index): # noqa: N802
+ # Initialize motor default values
+ if motor_index not in self.packet_handler.data:
+ self.packet_handler.data[motor_index] = get_default_motor_values(motor_index)
+
+ def txRxPacket(self): # noqa: N802
+ return COMM_SUCCESS
+
+ def getData(self, index, address, bytes): # noqa: N802
+ return self.packet_handler.data[index][address]
+
+
+class GroupSyncWrite:
+ def __init__(self, port_handler, packet_handler, address, bytes):
+ self.packet_handler = packet_handler
+ self.address = address
+
+ def addParam(self, index, data): # noqa: N802
+ # Initialize motor default values
+ if index not in self.packet_handler.data:
+ self.packet_handler.data[index] = get_default_motor_values(index)
+ self.changeParam(index, data)
+
+ def txPacket(self): # noqa: N802
+ return COMM_SUCCESS
+
+ def changeParam(self, index, data): # noqa: N802
+ self.packet_handler.data[index][self.address] = data
diff --git a/tests/mock_pyrealsense2.py b/tests/mock_pyrealsense2.py
new file mode 100644
index 000000000..5a39fc2bf
--- /dev/null
+++ b/tests/mock_pyrealsense2.py
@@ -0,0 +1,135 @@
+import enum
+
+import numpy as np
+
+
+class stream(enum.Enum): # noqa: N801
+ color = 0
+ depth = 1
+
+
+class format(enum.Enum): # noqa: N801
+ rgb8 = 0
+ z16 = 1
+
+
+class config: # noqa: N801
+ def enable_device(self, device_id: str):
+ self.device_enabled = device_id
+
+ def enable_stream(self, stream_type: stream, width=None, height=None, color_format=None, fps=None):
+ self.stream_type = stream_type
+ # Overwrite default values when possible
+ self.width = 848 if width is None else width
+ self.height = 480 if height is None else height
+ self.color_format = format.rgb8 if color_format is None else color_format
+ self.fps = 30 if fps is None else fps
+
+
+class RSColorProfile:
+ def __init__(self, config):
+ self.config = config
+
+ def fps(self):
+ return self.config.fps
+
+ def width(self):
+ return self.config.width
+
+ def height(self):
+ return self.config.height
+
+
+class RSColorStream:
+ def __init__(self, config):
+ self.config = config
+
+ def as_video_stream_profile(self):
+ return RSColorProfile(self.config)
+
+
+class RSProfile:
+ def __init__(self, config):
+ self.config = config
+
+ def get_stream(self, color_format):
+ del color_format # unused
+ return RSColorStream(self.config)
+
+
+class pipeline: # noqa: N801
+ def __init__(self):
+ self.started = False
+ self.config = None
+
+ def start(self, config):
+ self.started = True
+ self.config = config
+ return RSProfile(self.config)
+
+ def stop(self):
+ if not self.started:
+ raise RuntimeError("You need to start the camera before stop.")
+ self.started = False
+ self.config = None
+
+ def wait_for_frames(self, timeout_ms=50000):
+ del timeout_ms # unused
+ return RSFrames(self.config)
+
+
+class RSFrames:
+ def __init__(self, config):
+ self.config = config
+
+ def get_color_frame(self):
+ return RSColorFrame(self.config)
+
+ def get_depth_frame(self):
+ return RSDepthFrame(self.config)
+
+
+class RSColorFrame:
+ def __init__(self, config):
+ self.config = config
+
+ def get_data(self):
+ data = np.ones((self.config.height, self.config.width, 3), dtype=np.uint8)
+ # Create a difference between rgb and bgr
+ data[:, :, 0] = 2
+ return data
+
+
+class RSDepthFrame:
+ def __init__(self, config):
+ self.config = config
+
+ def get_data(self):
+ return np.ones((self.config.height, self.config.width), dtype=np.uint16)
+
+
+class RSDevice:
+ def __init__(self):
+ pass
+
+ def get_info(self, camera_info) -> str:
+ del camera_info # unused
+ # return fake serial number
+ return "123456789"
+
+
+class context: # noqa: N801
+ def __init__(self):
+ pass
+
+ def query_devices(self):
+ return [RSDevice()]
+
+
+class camera_info: # noqa: N801
+ # fake name
+ name = "Intel RealSense D435I"
+
+ def __init__(self, serial_number):
+ del serial_number
+ pass
diff --git a/tests/mock_scservo_sdk.py b/tests/mock_scservo_sdk.py
new file mode 100644
index 000000000..596978c00
--- /dev/null
+++ b/tests/mock_scservo_sdk.py
@@ -0,0 +1,103 @@
+"""Mocked classes and functions from dynamixel_sdk to allow for continuous integration
+and testing code logic that requires hardware and devices (e.g. robot arms, cameras)
+
+Warning: These mocked versions are minimalist. They do not exactly mock every behaviors
+from the original classes and functions (e.g. return types might be None instead of boolean).
+"""
+
+# from dynamixel_sdk import COMM_SUCCESS
+
+DEFAULT_BAUDRATE = 1_000_000
+COMM_SUCCESS = 0 # tx or rx packet communication success
+
+
+def convert_to_bytes(value, bytes):
+ # TODO(rcadene): remove need to mock `convert_to_bytes` by implemented the inverse transform
+ # `convert_bytes_to_value`
+ del bytes # unused
+ return value
+
+
+def get_default_motor_values(motor_index):
+ return {
+ # Key (int) are from SCS_SERIES_CONTROL_TABLE
+ 5: motor_index, # ID
+ 6: DEFAULT_BAUDRATE, # Baud_rate
+ 10: 0, # Drive_Mode
+ 21: 32, # P_Coefficient
+ 22: 32, # D_Coefficient
+ 23: 0, # I_Coefficient
+ 40: 0, # Torque_Enable
+ 41: 254, # Acceleration
+ 31: -2047, # Offset
+ 33: 0, # Mode
+ 55: 1, # Lock
+ # Set 2560 since calibration values for Aloha gripper is between start_pos=2499 and end_pos=3144
+ # For other joints, 2560 will be autocorrected to be in calibration range
+ 56: 2560, # Present_Position
+ 58: 0, # Present_Speed
+ 69: 0, # Present_Current
+ 85: 150, # Maximum_Acceleration
+ }
+
+
+class PortHandler:
+ def __init__(self, port):
+ self.port = port
+ # factory default baudrate
+ self.baudrate = DEFAULT_BAUDRATE
+
+ def openPort(self): # noqa: N802
+ return True
+
+ def closePort(self): # noqa: N802
+ pass
+
+ def setPacketTimeoutMillis(self, timeout_ms): # noqa: N802
+ del timeout_ms # unused
+
+ def getBaudRate(self): # noqa: N802
+ return self.baudrate
+
+ def setBaudRate(self, baudrate): # noqa: N802
+ self.baudrate = baudrate
+
+
+class PacketHandler:
+ def __init__(self, protocol_version):
+ del protocol_version # unused
+ # Use packet_handler.data to communicate across Read and Write
+ self.data = {}
+
+
+class GroupSyncRead:
+ def __init__(self, port_handler, packet_handler, address, bytes):
+ self.packet_handler = packet_handler
+
+ def addParam(self, motor_index): # noqa: N802
+ # Initialize motor default values
+ if motor_index not in self.packet_handler.data:
+ self.packet_handler.data[motor_index] = get_default_motor_values(motor_index)
+
+ def txRxPacket(self): # noqa: N802
+ return COMM_SUCCESS
+
+ def getData(self, index, address, bytes): # noqa: N802
+ return self.packet_handler.data[index][address]
+
+
+class GroupSyncWrite:
+ def __init__(self, port_handler, packet_handler, address, bytes):
+ self.packet_handler = packet_handler
+ self.address = address
+
+ def addParam(self, index, data): # noqa: N802
+ if index not in self.packet_handler.data:
+ self.packet_handler.data[index] = get_default_motor_values(index)
+ self.changeParam(index, data)
+
+ def txPacket(self): # noqa: N802
+ return COMM_SUCCESS
+
+ def changeParam(self, index, data): # noqa: N802
+ self.packet_handler.data[index][self.address] = data
diff --git a/tests/scripts/save_image_transforms_to_safetensors.py b/tests/scripts/save_image_transforms_to_safetensors.py
index 9d024a013..1fa194e50 100644
--- a/tests/scripts/save_image_transforms_to_safetensors.py
+++ b/tests/scripts/save_image_transforms_to_safetensors.py
@@ -76,7 +76,7 @@ def main():
dataset = LeRobotDataset(DATASET_REPO_ID, image_transforms=None)
output_dir = Path(ARTIFACT_DIR)
output_dir.mkdir(parents=True, exist_ok=True)
- original_frame = dataset[0][dataset.camera_keys[0]]
+ original_frame = dataset[0][dataset.meta.camera_keys[0]]
save_single_transforms(original_frame, output_dir)
save_default_config_transform(original_frame, output_dir)
diff --git a/tests/scripts/save_policy_to_safetensors.py b/tests/scripts/save_policy_to_safetensors.py
index 5236b7ae5..29d0ae19d 100644
--- a/tests/scripts/save_policy_to_safetensors.py
+++ b/tests/scripts/save_policy_to_safetensors.py
@@ -38,7 +38,7 @@ def get_policy_stats(env_name, policy_name, extra_overrides):
)
set_global_seed(1337)
dataset = make_dataset(cfg)
- policy = make_policy(cfg, dataset_stats=dataset.stats)
+ policy = make_policy(cfg, dataset_stats=dataset.meta.stats)
policy.train()
optimizer, _ = make_optimizer_and_scheduler(cfg, policy)
diff --git a/tests/test_cameras.py b/tests/test_cameras.py
index 0d5d94425..67512779a 100644
--- a/tests/test_cameras.py
+++ b/tests/test_cameras.py
@@ -1,21 +1,32 @@
"""
-Tests meant to be used locally and launched manually.
+Tests for physical cameras and their mocked versions.
+If the physical camera is not connected to the computer, or not working,
+the test will be skipped.
-Example usage:
+Example of running a specific test:
```bash
pytest -sx tests/test_cameras.py::test_camera
```
+
+Example of running test on a real camera connected to the computer:
+```bash
+pytest -sx 'tests/test_cameras.py::test_camera[opencv-False]'
+pytest -sx 'tests/test_cameras.py::test_camera[intelrealsense-False]'
+```
+
+Example of running test on a mocked version of the camera:
+```bash
+pytest -sx 'tests/test_cameras.py::test_camera[opencv-True]'
+pytest -sx 'tests/test_cameras.py::test_camera[intelrealsense-True]'
+```
"""
import numpy as np
import pytest
-from lerobot import available_robots
-from lerobot.common.robot_devices.cameras.opencv import OpenCVCamera, save_images_from_cameras
from lerobot.common.robot_devices.utils import RobotDeviceAlreadyConnectedError, RobotDeviceNotConnectedError
-from tests.utils import require_robot
+from tests.utils import TEST_CAMERA_TYPES, make_camera, require_camera
-CAMERA_INDEX = 2
# Maximum absolute difference between two consecutive images recored by a camera.
# This value differs with respect to the camera.
MAX_PIXEL_DIFFERENCE = 25
@@ -25,9 +36,9 @@ def compute_max_pixel_difference(first_image, second_image):
return np.abs(first_image.astype(float) - second_image.astype(float)).max()
-@pytest.mark.parametrize("robot_type", available_robots)
-@require_robot
-def test_camera(request, robot_type):
+@pytest.mark.parametrize("camera_type, mock", TEST_CAMERA_TYPES)
+@require_camera
+def test_camera(request, camera_type, mock):
"""Test assumes that `camera.read()` returns the same image when called multiple times in a row.
So the environment should not change (you shouldnt be in front of the camera) and the camera should not be moving.
@@ -36,10 +47,12 @@ def test_camera(request, robot_type):
"""
# TODO(rcadene): measure fps in nightly?
# TODO(rcadene): test logs
- # TODO(rcadene): add compatibility with other camera APIs
+
+ if camera_type == "opencv" and not mock:
+ pytest.skip("TODO(rcadene): fix test for opencv physical camera")
# Test instantiating
- camera = OpenCVCamera(CAMERA_INDEX)
+ camera = make_camera(camera_type, mock=mock)
# Test reading, async reading, disconnecting before connecting raises an error
with pytest.raises(RobotDeviceNotConnectedError):
@@ -53,7 +66,7 @@ def test_camera(request, robot_type):
del camera
# Test connecting
- camera = OpenCVCamera(CAMERA_INDEX)
+ camera = make_camera(camera_type, mock=mock)
camera.connect()
assert camera.is_connected
assert camera.fps is not None
@@ -78,11 +91,14 @@ def test_camera(request, robot_type):
camera.read()
color_image = camera.read()
async_color_image = camera.async_read()
- print(
+ error_msg = (
"max_pixel_difference between read() and async_read()",
compute_max_pixel_difference(color_image, async_color_image),
)
- assert np.allclose(color_image, async_color_image, rtol=1e-5, atol=MAX_PIXEL_DIFFERENCE)
+ # TODO(rcadene): properly set `rtol`
+ np.testing.assert_allclose(
+ color_image, async_color_image, rtol=1e-5, atol=MAX_PIXEL_DIFFERENCE, err_msg=error_msg
+ )
# Test disconnecting
camera.disconnect()
@@ -90,29 +106,60 @@ def test_camera(request, robot_type):
assert camera.thread is None
# Test disconnecting with `__del__`
- camera = OpenCVCamera(CAMERA_INDEX)
+ camera = make_camera(camera_type, mock=mock)
camera.connect()
del camera
# Test acquiring a bgr image
- camera = OpenCVCamera(CAMERA_INDEX, color_mode="bgr")
+ camera = make_camera(camera_type, color_mode="bgr", mock=mock)
camera.connect()
assert camera.color_mode == "bgr"
bgr_color_image = camera.read()
- assert np.allclose(color_image, bgr_color_image[:, :, [2, 1, 0]], rtol=1e-5, atol=MAX_PIXEL_DIFFERENCE)
+ np.testing.assert_allclose(
+ color_image, bgr_color_image[:, :, [2, 1, 0]], rtol=1e-5, atol=MAX_PIXEL_DIFFERENCE, err_msg=error_msg
+ )
del camera
- # TODO(rcadene): Add a test for a camera that doesnt support fps=60 and raises an OSError
- # TODO(rcadene): Add a test for a camera that supports fps=60
+ # Test acquiring a rotated image
+ camera = make_camera(camera_type, mock=mock)
+ camera.connect()
+ ori_color_image = camera.read()
+ del camera
- # Test fps=10 raises an OSError
- camera = OpenCVCamera(CAMERA_INDEX, fps=10)
- with pytest.raises(OSError):
+ for rotation in [None, 90, 180, -90]:
+ camera = make_camera(camera_type, rotation=rotation, mock=mock)
camera.connect()
- del camera
+
+ if mock:
+ import tests.mock_cv2 as cv2
+ else:
+ import cv2
+
+ if rotation is None:
+ manual_rot_img = ori_color_image
+ assert camera.rotation is None
+ elif rotation == 90:
+ manual_rot_img = np.rot90(color_image, k=1)
+ assert camera.rotation == cv2.ROTATE_90_CLOCKWISE
+ elif rotation == 180:
+ manual_rot_img = np.rot90(color_image, k=2)
+ assert camera.rotation == cv2.ROTATE_180
+ elif rotation == -90:
+ manual_rot_img = np.rot90(color_image, k=3)
+ assert camera.rotation == cv2.ROTATE_90_COUNTERCLOCKWISE
+
+ rot_color_image = camera.read()
+
+ np.testing.assert_allclose(
+ rot_color_image, manual_rot_img, rtol=1e-5, atol=MAX_PIXEL_DIFFERENCE, err_msg=error_msg
+ )
+ del camera
+
+ # TODO(rcadene): Add a test for a camera that doesnt support fps=60 and raises an OSError
+ # TODO(rcadene): Add a test for a camera that supports fps=60
# Test width and height can be set
- camera = OpenCVCamera(CAMERA_INDEX, fps=30, width=1280, height=720)
+ camera = make_camera(camera_type, fps=30, width=1280, height=720, mock=mock)
camera.connect()
assert camera.fps == 30
assert camera.width == 1280
@@ -125,13 +172,20 @@ def test_camera(request, robot_type):
del camera
# Test not supported width and height raise an error
- camera = OpenCVCamera(CAMERA_INDEX, fps=30, width=0, height=0)
+ camera = make_camera(camera_type, fps=30, width=0, height=0, mock=mock)
with pytest.raises(OSError):
camera.connect()
del camera
-@pytest.mark.parametrize("robot_type", available_robots)
-@require_robot
-def test_save_images_from_cameras(tmpdir, request, robot_type):
- save_images_from_cameras(tmpdir, record_time_s=1)
+@pytest.mark.parametrize("camera_type, mock", TEST_CAMERA_TYPES)
+@require_camera
+def test_save_images_from_cameras(tmpdir, request, camera_type, mock):
+ # TODO(rcadene): refactor
+ if camera_type == "opencv":
+ from lerobot.common.robot_devices.cameras.opencv import save_images_from_cameras
+ elif camera_type == "intelrealsense":
+ from lerobot.common.robot_devices.cameras.intelrealsense import save_images_from_cameras
+
+ # Small `record_time_s` to speedup unit tests
+ save_images_from_cameras(tmpdir, record_time_s=0.02, mock=mock)
diff --git a/tests/test_control_robot.py b/tests/test_control_robot.py
index 406edeb4f..0ba737a8f 100644
--- a/tests/test_control_robot.py
+++ b/tests/test_control_robot.py
@@ -1,70 +1,448 @@
+"""
+Tests for physical robots and their mocked versions.
+If the physical robots are not connected to the computer, or not working,
+the test will be skipped.
+
+Example of running a specific test:
+```bash
+pytest -sx tests/test_control_robot.py::test_teleoperate
+```
+
+Example of running test on real robots connected to the computer:
+```bash
+pytest -sx 'tests/test_control_robot.py::test_teleoperate[koch-False]'
+pytest -sx 'tests/test_control_robot.py::test_teleoperate[koch_bimanual-False]'
+pytest -sx 'tests/test_control_robot.py::test_teleoperate[aloha-False]'
+```
+
+Example of running test on a mocked version of robots:
+```bash
+pytest -sx 'tests/test_control_robot.py::test_teleoperate[koch-True]'
+pytest -sx 'tests/test_control_robot.py::test_teleoperate[koch_bimanual-True]'
+pytest -sx 'tests/test_control_robot.py::test_teleoperate[aloha-True]'
+```
+"""
+
+import multiprocessing
from pathlib import Path
+from unittest.mock import patch
import pytest
-from lerobot import available_robots
+from lerobot.common.logger import Logger
from lerobot.common.policies.factory import make_policy
from lerobot.common.utils.utils import init_hydra_config
from lerobot.scripts.control_robot import calibrate, record, replay, teleoperate
+from lerobot.scripts.train import make_optimizer_and_scheduler
from tests.test_robots import make_robot
-from tests.utils import DEFAULT_CONFIG_PATH, DEVICE, require_robot
+from tests.utils import DEFAULT_CONFIG_PATH, DEVICE, TEST_ROBOT_TYPES, mock_calibration_dir, require_robot
-@pytest.mark.parametrize("robot_type", available_robots)
+@pytest.mark.parametrize("robot_type, mock", TEST_ROBOT_TYPES)
@require_robot
-def test_teleoperate(request, robot_type):
- robot = make_robot(robot_type)
+def test_teleoperate(tmpdir, request, robot_type, mock):
+ if mock and robot_type != "aloha":
+ request.getfixturevalue("patch_builtins_input")
+
+ # Create an empty calibration directory to trigger manual calibration
+ # and avoid writing calibration files in user .cache/calibration folder
+ tmpdir = Path(tmpdir)
+ calibration_dir = tmpdir / robot_type
+ mock_calibration_dir(calibration_dir)
+ overrides = [f"calibration_dir={calibration_dir}"]
+ else:
+ # Use the default .cache/calibration folder when mock=False
+ overrides = None
+
+ robot = make_robot(robot_type, overrides=overrides, mock=mock)
teleoperate(robot, teleop_time_s=1)
teleoperate(robot, fps=30, teleop_time_s=1)
teleoperate(robot, fps=60, teleop_time_s=1)
del robot
-@pytest.mark.parametrize("robot_type", available_robots)
+@pytest.mark.parametrize("robot_type, mock", TEST_ROBOT_TYPES)
@require_robot
-def test_calibrate(request, robot_type):
- robot = make_robot(robot_type)
- calibrate(robot)
+def test_calibrate(tmpdir, request, robot_type, mock):
+ if mock:
+ request.getfixturevalue("patch_builtins_input")
+
+ # Create an empty calibration directory to trigger manual calibration
+ tmpdir = Path(tmpdir)
+ calibration_dir = tmpdir / robot_type
+ overrides_calibration_dir = [f"calibration_dir={calibration_dir}"]
+
+ robot = make_robot(robot_type, overrides=overrides_calibration_dir, mock=mock)
+ calibrate(robot, arms=robot.available_arms)
del robot
-@pytest.mark.parametrize("robot_type", available_robots)
+@pytest.mark.parametrize("robot_type, mock", TEST_ROBOT_TYPES)
@require_robot
-def test_record_without_cameras(tmpdir, request, robot_type):
- root = Path(tmpdir)
+def test_record_without_cameras(tmpdir, request, robot_type, mock):
+ # Avoid using cameras
+ overrides = ["~cameras"]
+
+ if mock and robot_type != "aloha":
+ request.getfixturevalue("patch_builtins_input")
+
+ # Create an empty calibration directory to trigger manual calibration
+ # and avoid writing calibration files in user .cache/calibration folder
+ calibration_dir = Path(tmpdir) / robot_type
+ mock_calibration_dir(calibration_dir)
+ overrides.append(f"calibration_dir={calibration_dir}")
+
repo_id = "lerobot/debug"
+ root = Path(tmpdir) / "data" / repo_id
+ single_task = "Do something."
- robot = make_robot(robot_type, overrides=["~cameras"])
- record(robot, fps=30, root=root, repo_id=repo_id, warmup_time_s=1, episode_time_s=1, num_episodes=2)
+ robot = make_robot(robot_type, overrides=overrides, mock=mock)
+ record(
+ robot,
+ fps=30,
+ root=root,
+ repo_id=repo_id,
+ single_task=single_task,
+ warmup_time_s=1,
+ episode_time_s=1,
+ num_episodes=2,
+ run_compute_stats=False,
+ push_to_hub=False,
+ video=False,
+ play_sounds=False,
+ )
-@pytest.mark.parametrize("robot_type", available_robots)
+@pytest.mark.parametrize("robot_type, mock", TEST_ROBOT_TYPES)
@require_robot
-def test_record_and_replay_and_policy(tmpdir, request, robot_type):
+def test_record_and_replay_and_policy(tmpdir, request, robot_type, mock):
+ tmpdir = Path(tmpdir)
+
+ if mock and robot_type != "aloha":
+ request.getfixturevalue("patch_builtins_input")
+
+ # Create an empty calibration directory to trigger manual calibration
+ # and avoid writing calibration files in user .cache/calibration folder
+ calibration_dir = tmpdir / robot_type
+ mock_calibration_dir(calibration_dir)
+ overrides = [f"calibration_dir={calibration_dir}"]
+ else:
+ # Use the default .cache/calibration folder when mock=False or for aloha
+ overrides = None
+
env_name = "koch_real"
policy_name = "act_koch_real"
- root = Path(tmpdir)
repo_id = "lerobot/debug"
+ root = tmpdir / "data" / repo_id
+ single_task = "Do something."
- robot = make_robot(robot_type)
+ robot = make_robot(robot_type, overrides=overrides, mock=mock)
dataset = record(
- robot, fps=30, root=root, repo_id=repo_id, warmup_time_s=1, episode_time_s=1, num_episodes=2
+ robot,
+ root,
+ repo_id,
+ single_task,
+ fps=1,
+ warmup_time_s=0.5,
+ episode_time_s=1,
+ reset_time_s=1,
+ num_episodes=2,
+ push_to_hub=False,
+ # TODO(rcadene, aliberts): test video=True
+ video=False,
+ # TODO(rcadene): display cameras through cv2 sometimes crashes on mac
+ display_cameras=False,
+ play_sounds=False,
)
+ assert dataset.meta.total_episodes == 2
+ assert len(dataset) == 2
+
+ replay(robot, episode=0, fps=1, root=root, repo_id=repo_id, play_sounds=False)
- replay(robot, episode=0, fps=30, root=root, repo_id=repo_id)
+ # TODO(rcadene, aliberts): rethink this design
+ if robot_type == "aloha":
+ env_name = "aloha_real"
+ policy_name = "act_aloha_real"
+ elif robot_type in ["koch", "koch_bimanual"]:
+ env_name = "koch_real"
+ policy_name = "act_koch_real"
+ elif robot_type == "so100":
+ env_name = "so100_real"
+ policy_name = "act_so100_real"
+ elif robot_type == "moss":
+ env_name = "moss_real"
+ policy_name = "act_moss_real"
+ else:
+ raise NotImplementedError(robot_type)
+
+ overrides = [
+ f"env={env_name}",
+ f"policy={policy_name}",
+ f"device={DEVICE}",
+ ]
+
+ if robot_type == "koch_bimanual":
+ overrides += ["env.state_dim=12", "env.action_dim=12"]
+
+ overrides += ["wandb.enable=false"]
+ overrides += ["env.fps=1"]
cfg = init_hydra_config(
DEFAULT_CONFIG_PATH,
- overrides=[
- f"env={env_name}",
- f"policy={policy_name}",
- f"device={DEVICE}",
- ],
+ overrides=overrides,
+ )
+
+ policy = make_policy(hydra_cfg=cfg, dataset_stats=dataset.meta.stats)
+ optimizer, lr_scheduler = make_optimizer_and_scheduler(cfg, policy)
+ out_dir = tmpdir / "logger"
+ logger = Logger(cfg, out_dir, wandb_job_name="debug")
+ logger.save_checkpoint(
+ 0,
+ policy,
+ optimizer,
+ lr_scheduler,
+ identifier=0,
)
+ pretrained_policy_name_or_path = out_dir / "checkpoints/last/pretrained_model"
+
+ # In `examples/9_use_aloha.md`, we advise using `num_image_writer_processes=1`
+ # during inference, to reach constent fps, so we test this here.
+ if robot_type == "aloha":
+ num_image_writer_processes = 1
- policy = make_policy(hydra_cfg=cfg, dataset_stats=dataset.stats)
+ # `multiprocessing.set_start_method("spawn", force=True)` avoids a hanging issue
+ # before exiting pytest. However, it outputs the following error in the log:
+ # Traceback (most recent call last):
+ # File "", line 1, in
+ # File "/Users/rcadene/miniconda3/envs/lerobot/lib/python3.10/multiprocessing/spawn.py", line 116, in spawn_main
+ # exitcode = _main(fd, parent_sentinel)
+ # File "/Users/rcadene/miniconda3/envs/lerobot/lib/python3.10/multiprocessing/spawn.py", line 126, in _main
+ # self = reduction.pickle.load(from_parent)
+ # File "/Users/rcadene/miniconda3/envs/lerobot/lib/python3.10/multiprocessing/synchronize.py", line 110, in __setstate__
+ # self._semlock = _multiprocessing.SemLock._rebuild(*state)
+ # FileNotFoundError: [Errno 2] No such file or directory
+ # TODO(rcadene, aliberts): fix FileNotFoundError in multiprocessing
+ multiprocessing.set_start_method("spawn", force=True)
+ else:
+ num_image_writer_processes = 0
- record(robot, policy, cfg, run_time_s=1)
+ eval_repo_id = "lerobot/eval_debug"
+ eval_root = tmpdir / "data" / eval_repo_id
+
+ dataset = record(
+ robot,
+ eval_root,
+ eval_repo_id,
+ single_task,
+ pretrained_policy_name_or_path,
+ warmup_time_s=1,
+ episode_time_s=1,
+ reset_time_s=1,
+ num_episodes=2,
+ run_compute_stats=False,
+ push_to_hub=False,
+ video=False,
+ display_cameras=False,
+ play_sounds=False,
+ num_image_writer_processes=num_image_writer_processes,
+ )
+
+ assert dataset.num_episodes == 2
+ assert len(dataset) == 2
del robot
+
+
+@pytest.mark.parametrize("robot_type, mock", [("koch", True)])
+@require_robot
+def test_resume_record(tmpdir, request, robot_type, mock):
+ if mock and robot_type != "aloha":
+ request.getfixturevalue("patch_builtins_input")
+
+ # Create an empty calibration directory to trigger manual calibration
+ # and avoid writing calibration files in user .cache/calibration folder
+ calibration_dir = tmpdir / robot_type
+ mock_calibration_dir(calibration_dir)
+ overrides = [f"calibration_dir={calibration_dir}"]
+ else:
+ # Use the default .cache/calibration folder when mock=False or for aloha
+ overrides = []
+
+ robot = make_robot(robot_type, overrides=overrides, mock=mock)
+
+ repo_id = "lerobot/debug"
+ root = Path(tmpdir) / "data" / repo_id
+ single_task = "Do something."
+
+ record_kwargs = {
+ "robot": robot,
+ "root": root,
+ "repo_id": repo_id,
+ "single_task": single_task,
+ "fps": 1,
+ "warmup_time_s": 0,
+ "episode_time_s": 1,
+ "push_to_hub": False,
+ "video": False,
+ "display_cameras": False,
+ "play_sounds": False,
+ "run_compute_stats": False,
+ "local_files_only": True,
+ "num_episodes": 1,
+ }
+
+ dataset = record(**record_kwargs)
+ assert len(dataset) == 1, f"`dataset` should contain 1 frame, not {len(dataset)}"
+
+ with pytest.raises(FileExistsError):
+ # Dataset already exists, but resume=False by default
+ record(**record_kwargs)
+
+ dataset = record(**record_kwargs, resume=True)
+ assert len(dataset) == 2, f"`dataset` should contain 2 frames, not {len(dataset)}"
+
+
+@pytest.mark.parametrize("robot_type, mock", [("koch", True)])
+@require_robot
+def test_record_with_event_rerecord_episode(tmpdir, request, robot_type, mock):
+ if mock and robot_type != "aloha":
+ request.getfixturevalue("patch_builtins_input")
+
+ # Create an empty calibration directory to trigger manual calibration
+ # and avoid writing calibration files in user .cache/calibration folder
+ calibration_dir = tmpdir / robot_type
+ mock_calibration_dir(calibration_dir)
+ overrides = [f"calibration_dir={calibration_dir}"]
+ else:
+ # Use the default .cache/calibration folder when mock=False or for aloha
+ overrides = []
+
+ robot = make_robot(robot_type, overrides=overrides, mock=mock)
+ with patch("lerobot.scripts.control_robot.init_keyboard_listener") as mock_listener:
+ mock_events = {}
+ mock_events["exit_early"] = True
+ mock_events["rerecord_episode"] = True
+ mock_events["stop_recording"] = False
+ mock_listener.return_value = (None, mock_events)
+
+ repo_id = "lerobot/debug"
+ root = Path(tmpdir) / "data" / repo_id
+ single_task = "Do something."
+
+ dataset = record(
+ robot,
+ root,
+ repo_id,
+ single_task,
+ fps=1,
+ warmup_time_s=0,
+ episode_time_s=1,
+ num_episodes=1,
+ push_to_hub=False,
+ video=False,
+ display_cameras=False,
+ play_sounds=False,
+ run_compute_stats=False,
+ )
+
+ assert not mock_events["rerecord_episode"], "`rerecord_episode` wasn't properly reset to False"
+ assert not mock_events["exit_early"], "`exit_early` wasn't properly reset to False"
+ assert len(dataset) == 1, "`dataset` should contain only 1 frame"
+
+
+@pytest.mark.parametrize("robot_type, mock", [("koch", True)])
+@require_robot
+def test_record_with_event_exit_early(tmpdir, request, robot_type, mock):
+ if mock:
+ request.getfixturevalue("patch_builtins_input")
+
+ # Create an empty calibration directory to trigger manual calibration
+ # and avoid writing calibration files in user .cache/calibration folder
+ calibration_dir = tmpdir / robot_type
+ mock_calibration_dir(calibration_dir)
+ overrides = [f"calibration_dir={calibration_dir}"]
+ else:
+ # Use the default .cache/calibration folder when mock=False or for aloha
+ overrides = []
+
+ robot = make_robot(robot_type, overrides=overrides, mock=mock)
+ with patch("lerobot.scripts.control_robot.init_keyboard_listener") as mock_listener:
+ mock_events = {}
+ mock_events["exit_early"] = True
+ mock_events["rerecord_episode"] = False
+ mock_events["stop_recording"] = False
+ mock_listener.return_value = (None, mock_events)
+
+ repo_id = "lerobot/debug"
+ root = Path(tmpdir) / "data" / repo_id
+ single_task = "Do something."
+
+ dataset = record(
+ robot,
+ fps=2,
+ root=root,
+ single_task=single_task,
+ repo_id=repo_id,
+ warmup_time_s=0,
+ episode_time_s=1,
+ num_episodes=1,
+ push_to_hub=False,
+ video=False,
+ display_cameras=False,
+ play_sounds=False,
+ run_compute_stats=False,
+ )
+
+ assert not mock_events["exit_early"], "`exit_early` wasn't properly reset to False"
+ assert len(dataset) == 1, "`dataset` should contain only 1 frame"
+
+
+@pytest.mark.parametrize(
+ "robot_type, mock, num_image_writer_processes", [("koch", True, 0), ("koch", True, 1)]
+)
+@require_robot
+def test_record_with_event_stop_recording(tmpdir, request, robot_type, mock, num_image_writer_processes):
+ if mock:
+ request.getfixturevalue("patch_builtins_input")
+
+ # Create an empty calibration directory to trigger manual calibration
+ # and avoid writing calibration files in user .cache/calibration folder
+ calibration_dir = tmpdir / robot_type
+ mock_calibration_dir(calibration_dir)
+ overrides = [f"calibration_dir={calibration_dir}"]
+ else:
+ # Use the default .cache/calibration folder when mock=False or for aloha
+ overrides = []
+
+ robot = make_robot(robot_type, overrides=overrides, mock=mock)
+ with patch("lerobot.scripts.control_robot.init_keyboard_listener") as mock_listener:
+ mock_events = {}
+ mock_events["exit_early"] = True
+ mock_events["rerecord_episode"] = False
+ mock_events["stop_recording"] = True
+ mock_listener.return_value = (None, mock_events)
+
+ repo_id = "lerobot/debug"
+ root = Path(tmpdir) / "data" / repo_id
+ single_task = "Do something."
+
+ dataset = record(
+ robot,
+ root,
+ repo_id,
+ single_task=single_task,
+ fps=1,
+ warmup_time_s=0,
+ episode_time_s=1,
+ num_episodes=2,
+ push_to_hub=False,
+ video=False,
+ display_cameras=False,
+ play_sounds=False,
+ run_compute_stats=False,
+ num_image_writer_processes=num_image_writer_processes,
+ )
+
+ assert not mock_events["exit_early"], "`exit_early` wasn't properly reset to False"
+ assert len(dataset) == 1, "`dataset` should contain only 1 frame"
diff --git a/tests/test_datasets.py b/tests/test_datasets.py
index 7fe84bc57..9f3615871 100644
--- a/tests/test_datasets.py
+++ b/tests/test_datasets.py
@@ -33,18 +33,72 @@
get_stats_einops_patterns,
)
from lerobot.common.datasets.factory import make_dataset
-from lerobot.common.datasets.lerobot_dataset import LeRobotDataset, MultiLeRobotDataset
+from lerobot.common.datasets.lerobot_dataset import (
+ LeRobotDataset,
+ MultiLeRobotDataset,
+)
from lerobot.common.datasets.utils import (
create_branch,
flatten_dict,
hf_transform_to_torch,
- load_previous_and_future_frames,
unflatten_dict,
)
from lerobot.common.utils.utils import init_hydra_config, seeded_context
-from tests.utils import DEFAULT_CONFIG_PATH, DEVICE
+from tests.fixtures.constants import DUMMY_REPO_ID
+from tests.utils import DEFAULT_CONFIG_PATH, DEVICE, make_robot
+
+
+def test_same_attributes_defined(lerobot_dataset_factory, tmp_path):
+ """
+ Instantiate a LeRobotDataset both ways with '__init__()' and 'create()' and verify that instantiated
+ objects have the same sets of attributes defined.
+ """
+ # Instantiate both ways
+ robot = make_robot("koch", mock=True)
+ root_create = tmp_path / "create"
+ dataset_create = LeRobotDataset.create(repo_id=DUMMY_REPO_ID, fps=30, robot=robot, root=root_create)
+
+ root_init = tmp_path / "init"
+ dataset_init = lerobot_dataset_factory(root=root_init)
+
+ # Access the '_hub_version' cached_property in both instances to force its creation
+ _ = dataset_init.meta._hub_version
+ _ = dataset_create.meta._hub_version
+
+ init_attr = set(vars(dataset_init).keys())
+ create_attr = set(vars(dataset_create).keys())
+
+ assert init_attr == create_attr
+
+
+def test_dataset_initialization(lerobot_dataset_factory, tmp_path):
+ kwargs = {
+ "repo_id": DUMMY_REPO_ID,
+ "total_episodes": 10,
+ "total_frames": 400,
+ "episodes": [2, 5, 6],
+ }
+ dataset = lerobot_dataset_factory(root=tmp_path, **kwargs)
+ assert dataset.repo_id == kwargs["repo_id"]
+ assert dataset.meta.total_episodes == kwargs["total_episodes"]
+ assert dataset.meta.total_frames == kwargs["total_frames"]
+ assert dataset.episodes == kwargs["episodes"]
+ assert dataset.num_episodes == len(kwargs["episodes"])
+ assert dataset.num_frames == len(dataset)
+
+# TODO(aliberts):
+# - [ ] test various attributes & state from init and create
+# - [ ] test init with episodes and check num_frames
+# - [ ] test add_frame
+# - [ ] test add_episode
+# - [ ] test consolidate
+# - [ ] test push_to_hub
+# - [ ] test smaller methods
+
+
+@pytest.mark.skip("TODO after v2 migration / removing hydra")
@pytest.mark.parametrize(
"env_name, repo_id, policy_name",
lerobot.env_dataset_policy_triplets
@@ -67,7 +121,7 @@ def test_factory(env_name, repo_id, policy_name):
)
dataset = make_dataset(cfg)
delta_timestamps = dataset.delta_timestamps
- camera_keys = dataset.camera_keys
+ camera_keys = dataset.meta.camera_keys
item = dataset[0]
@@ -117,6 +171,7 @@ def test_factory(env_name, repo_id, policy_name):
# TODO(alexander-soare): If you're hunting for savings on testing time, this takes about 5 seconds.
+@pytest.mark.skip("TODO after v2 migration / removing hydra")
def test_multilerobotdataset_frames():
"""Check that all dataset frames are incorporated."""
# Note: use the image variants of the dataset to make the test approx 3x faster.
@@ -130,7 +185,7 @@ def test_multilerobotdataset_frames():
sub_datasets = [LeRobotDataset(repo_id) for repo_id in repo_ids]
dataset = MultiLeRobotDataset(repo_ids)
assert len(dataset) == sum(len(d) for d in sub_datasets)
- assert dataset.num_samples == sum(d.num_samples for d in sub_datasets)
+ assert dataset.num_frames == sum(d.num_frames for d in sub_datasets)
assert dataset.num_episodes == sum(d.num_episodes for d in sub_datasets)
# Run through all items of the LeRobotDatasets in parallel with the items of the MultiLerobotDataset and
@@ -149,6 +204,8 @@ def test_multilerobotdataset_frames():
assert torch.equal(sub_dataset_item[k], dataset_item[k])
+# TODO(aliberts, rcadene): Refactor and move this to a tests/test_compute_stats.py
+@pytest.mark.skip("TODO after v2 migration / removing hydra")
def test_compute_stats_on_xarm():
"""Check that the statistics are computed correctly according to the stats_patterns property.
@@ -197,7 +254,7 @@ def test_compute_stats_on_xarm():
assert torch.allclose(computed_stats[k]["max"], expected_stats[k]["max"])
# load stats used during training which are expected to match the ones returned by computed_stats
- loaded_stats = dataset.stats # noqa: F841
+ loaded_stats = dataset.meta.stats # noqa: F841
# TODO(rcadene): we can't test this because expected_stats is computed on a subset
# # test loaded stats match expected stats
@@ -208,72 +265,7 @@ def test_compute_stats_on_xarm():
# assert torch.allclose(loaded_stats[k]["max"], expected_stats[k]["max"])
-def test_load_previous_and_future_frames_within_tolerance():
- hf_dataset = Dataset.from_dict(
- {
- "timestamp": [0.1, 0.2, 0.3, 0.4, 0.5],
- "index": [0, 1, 2, 3, 4],
- "episode_index": [0, 0, 0, 0, 0],
- }
- )
- hf_dataset.set_transform(hf_transform_to_torch)
- episode_data_index = {
- "from": torch.tensor([0]),
- "to": torch.tensor([5]),
- }
- delta_timestamps = {"index": [-0.2, 0, 0.139]}
- tol = 0.04
- item = hf_dataset[2]
- item = load_previous_and_future_frames(item, hf_dataset, episode_data_index, delta_timestamps, tol)
- data, is_pad = item["index"], item["index_is_pad"]
- assert torch.equal(data, torch.tensor([0, 2, 3])), "Data does not match expected values"
- assert not is_pad.any(), "Unexpected padding detected"
-
-
-def test_load_previous_and_future_frames_outside_tolerance_inside_episode_range():
- hf_dataset = Dataset.from_dict(
- {
- "timestamp": [0.1, 0.2, 0.3, 0.4, 0.5],
- "index": [0, 1, 2, 3, 4],
- "episode_index": [0, 0, 0, 0, 0],
- }
- )
- hf_dataset.set_transform(hf_transform_to_torch)
- episode_data_index = {
- "from": torch.tensor([0]),
- "to": torch.tensor([5]),
- }
- delta_timestamps = {"index": [-0.2, 0, 0.141]}
- tol = 0.04
- item = hf_dataset[2]
- with pytest.raises(AssertionError):
- load_previous_and_future_frames(item, hf_dataset, episode_data_index, delta_timestamps, tol)
-
-
-def test_load_previous_and_future_frames_outside_tolerance_outside_episode_range():
- hf_dataset = Dataset.from_dict(
- {
- "timestamp": [0.1, 0.2, 0.3, 0.4, 0.5],
- "index": [0, 1, 2, 3, 4],
- "episode_index": [0, 0, 0, 0, 0],
- }
- )
- hf_dataset.set_transform(hf_transform_to_torch)
- episode_data_index = {
- "from": torch.tensor([0]),
- "to": torch.tensor([5]),
- }
- delta_timestamps = {"index": [-0.3, -0.24, 0, 0.26, 0.3]}
- tol = 0.04
- item = hf_dataset[2]
- item = load_previous_and_future_frames(item, hf_dataset, episode_data_index, delta_timestamps, tol)
- data, is_pad = item["index"], item["index_is_pad"]
- assert torch.equal(data, torch.tensor([0, 0, 2, 4, 4])), "Data does not match expected values"
- assert torch.equal(
- is_pad, torch.tensor([True, False, False, True, True])
- ), "Padding does not match expected values"
-
-
+# TODO(aliberts): Move to more appropriate location
def test_flatten_unflatten_dict():
d = {
"obs": {
@@ -297,6 +289,7 @@ def test_flatten_unflatten_dict():
assert json.dumps(original_d, sort_keys=True) == json.dumps(d, sort_keys=True), f"{original_d} != {d}"
+@pytest.mark.skip("TODO after v2 migration / removing hydra")
@pytest.mark.parametrize(
"repo_id",
[
@@ -308,12 +301,11 @@ def test_flatten_unflatten_dict():
# "lerobot/cmu_stretch",
],
)
+# TODO(rcadene, aliberts): all these tests fail locally on Mac M1, but not on Linux
def test_backward_compatibility(repo_id):
"""The artifacts for this test have been generated by `tests/scripts/save_dataset_to_safetensors.py`."""
- dataset = LeRobotDataset(
- repo_id,
- )
+ dataset = LeRobotDataset(repo_id)
test_dir = Path("tests/data/save_dataset_to_safetensors") / repo_id
@@ -369,6 +361,7 @@ def load_and_compare(i):
# load_and_compare(i - 1)
+@pytest.mark.skip("TODO after v2 migration / removing hydra")
def test_aggregate_stats():
"""Makes 3 basic datasets and checks that aggregate stats are computed correctly."""
with seeded_context(0):
diff --git a/tests/test_delta_timestamps.py b/tests/test_delta_timestamps.py
new file mode 100644
index 000000000..3c2e307f8
--- /dev/null
+++ b/tests/test_delta_timestamps.py
@@ -0,0 +1,256 @@
+import pytest
+import torch
+from datasets import Dataset
+
+from lerobot.common.datasets.utils import (
+ calculate_episode_data_index,
+ check_delta_timestamps,
+ check_timestamps_sync,
+ get_delta_indices,
+ hf_transform_to_torch,
+)
+from tests.fixtures.constants import DUMMY_MOTOR_FEATURES
+
+
+@pytest.fixture(scope="module")
+def synced_hf_dataset_factory(hf_dataset_factory):
+ def _create_synced_hf_dataset(fps: int = 30) -> Dataset:
+ return hf_dataset_factory(fps=fps)
+
+ return _create_synced_hf_dataset
+
+
+@pytest.fixture(scope="module")
+def unsynced_hf_dataset_factory(synced_hf_dataset_factory):
+ def _create_unsynced_hf_dataset(fps: int = 30, tolerance_s: float = 1e-4) -> Dataset:
+ hf_dataset = synced_hf_dataset_factory(fps=fps)
+ features = hf_dataset.features
+ df = hf_dataset.to_pandas()
+ dtype = df["timestamp"].dtype # This is to avoid pandas type warning
+ # Modify a single timestamp just outside tolerance
+ df.at[30, "timestamp"] = dtype.type(df.at[30, "timestamp"] + (tolerance_s * 1.1))
+ unsynced_hf_dataset = Dataset.from_pandas(df, features=features)
+ unsynced_hf_dataset.set_transform(hf_transform_to_torch)
+ return unsynced_hf_dataset
+
+ return _create_unsynced_hf_dataset
+
+
+@pytest.fixture(scope="module")
+def slightly_off_hf_dataset_factory(synced_hf_dataset_factory):
+ def _create_slightly_off_hf_dataset(fps: int = 30, tolerance_s: float = 1e-4) -> Dataset:
+ hf_dataset = synced_hf_dataset_factory(fps=fps)
+ features = hf_dataset.features
+ df = hf_dataset.to_pandas()
+ dtype = df["timestamp"].dtype # This is to avoid pandas type warning
+ # Modify a single timestamp just inside tolerance
+ df.at[30, "timestamp"] = dtype.type(df.at[30, "timestamp"] + (tolerance_s * 0.9))
+ unsynced_hf_dataset = Dataset.from_pandas(df, features=features)
+ unsynced_hf_dataset.set_transform(hf_transform_to_torch)
+ return unsynced_hf_dataset
+
+ return _create_slightly_off_hf_dataset
+
+
+@pytest.fixture(scope="module")
+def valid_delta_timestamps_factory():
+ def _create_valid_delta_timestamps(fps: int = 30, keys: list = DUMMY_MOTOR_FEATURES) -> dict:
+ delta_timestamps = {key: [i * (1 / fps) for i in range(-10, 10)] for key in keys}
+ return delta_timestamps
+
+ return _create_valid_delta_timestamps
+
+
+@pytest.fixture(scope="module")
+def invalid_delta_timestamps_factory(valid_delta_timestamps_factory):
+ def _create_invalid_delta_timestamps(
+ fps: int = 30, tolerance_s: float = 1e-4, keys: list = DUMMY_MOTOR_FEATURES
+ ) -> dict:
+ delta_timestamps = valid_delta_timestamps_factory(fps, keys)
+ # Modify a single timestamp just outside tolerance
+ for key in keys:
+ delta_timestamps[key][3] += tolerance_s * 1.1
+ return delta_timestamps
+
+ return _create_invalid_delta_timestamps
+
+
+@pytest.fixture(scope="module")
+def slightly_off_delta_timestamps_factory(valid_delta_timestamps_factory):
+ def _create_slightly_off_delta_timestamps(
+ fps: int = 30, tolerance_s: float = 1e-4, keys: list = DUMMY_MOTOR_FEATURES
+ ) -> dict:
+ delta_timestamps = valid_delta_timestamps_factory(fps, keys)
+ # Modify a single timestamp just inside tolerance
+ for key in delta_timestamps:
+ delta_timestamps[key][3] += tolerance_s * 0.9
+ delta_timestamps[key][-3] += tolerance_s * 0.9
+ return delta_timestamps
+
+ return _create_slightly_off_delta_timestamps
+
+
+@pytest.fixture(scope="module")
+def delta_indices(keys: list = DUMMY_MOTOR_FEATURES) -> dict:
+ return {key: list(range(-10, 10)) for key in keys}
+
+
+def test_check_timestamps_sync_synced(synced_hf_dataset_factory):
+ fps = 30
+ tolerance_s = 1e-4
+ synced_hf_dataset = synced_hf_dataset_factory(fps)
+ episode_data_index = calculate_episode_data_index(synced_hf_dataset)
+ result = check_timestamps_sync(
+ hf_dataset=synced_hf_dataset,
+ episode_data_index=episode_data_index,
+ fps=fps,
+ tolerance_s=tolerance_s,
+ )
+ assert result is True
+
+
+def test_check_timestamps_sync_unsynced(unsynced_hf_dataset_factory):
+ fps = 30
+ tolerance_s = 1e-4
+ unsynced_hf_dataset = unsynced_hf_dataset_factory(fps, tolerance_s)
+ episode_data_index = calculate_episode_data_index(unsynced_hf_dataset)
+ with pytest.raises(ValueError):
+ check_timestamps_sync(
+ hf_dataset=unsynced_hf_dataset,
+ episode_data_index=episode_data_index,
+ fps=fps,
+ tolerance_s=tolerance_s,
+ )
+
+
+def test_check_timestamps_sync_unsynced_no_exception(unsynced_hf_dataset_factory):
+ fps = 30
+ tolerance_s = 1e-4
+ unsynced_hf_dataset = unsynced_hf_dataset_factory(fps, tolerance_s)
+ episode_data_index = calculate_episode_data_index(unsynced_hf_dataset)
+ result = check_timestamps_sync(
+ hf_dataset=unsynced_hf_dataset,
+ episode_data_index=episode_data_index,
+ fps=fps,
+ tolerance_s=tolerance_s,
+ raise_value_error=False,
+ )
+ assert result is False
+
+
+def test_check_timestamps_sync_slightly_off(slightly_off_hf_dataset_factory):
+ fps = 30
+ tolerance_s = 1e-4
+ slightly_off_hf_dataset = slightly_off_hf_dataset_factory(fps, tolerance_s)
+ episode_data_index = calculate_episode_data_index(slightly_off_hf_dataset)
+ result = check_timestamps_sync(
+ hf_dataset=slightly_off_hf_dataset,
+ episode_data_index=episode_data_index,
+ fps=fps,
+ tolerance_s=tolerance_s,
+ )
+ assert result is True
+
+
+def test_check_timestamps_sync_single_timestamp():
+ single_timestamp_hf_dataset = Dataset.from_dict({"timestamp": [0.0], "episode_index": [0]})
+ single_timestamp_hf_dataset.set_transform(hf_transform_to_torch)
+ episode_data_index = {"to": torch.tensor([1]), "from": torch.tensor([0])}
+ fps = 30
+ tolerance_s = 1e-4
+ result = check_timestamps_sync(
+ hf_dataset=single_timestamp_hf_dataset,
+ episode_data_index=episode_data_index,
+ fps=fps,
+ tolerance_s=tolerance_s,
+ )
+ assert result is True
+
+
+# TODO(aliberts): Change behavior of hf_transform_to_torch so that it can work with empty dataset
+@pytest.mark.skip("TODO: fix")
+def test_check_timestamps_sync_empty_dataset():
+ fps = 30
+ tolerance_s = 1e-4
+ empty_hf_dataset = Dataset.from_dict({"timestamp": [], "episode_index": []})
+ empty_hf_dataset.set_transform(hf_transform_to_torch)
+ episode_data_index = {
+ "to": torch.tensor([], dtype=torch.int64),
+ "from": torch.tensor([], dtype=torch.int64),
+ }
+ result = check_timestamps_sync(
+ hf_dataset=empty_hf_dataset,
+ episode_data_index=episode_data_index,
+ fps=fps,
+ tolerance_s=tolerance_s,
+ )
+ assert result is True
+
+
+def test_check_delta_timestamps_valid(valid_delta_timestamps_factory):
+ fps = 30
+ tolerance_s = 1e-4
+ valid_delta_timestamps = valid_delta_timestamps_factory(fps)
+ result = check_delta_timestamps(
+ delta_timestamps=valid_delta_timestamps,
+ fps=fps,
+ tolerance_s=tolerance_s,
+ )
+ assert result is True
+
+
+def test_check_delta_timestamps_slightly_off(slightly_off_delta_timestamps_factory):
+ fps = 30
+ tolerance_s = 1e-4
+ slightly_off_delta_timestamps = slightly_off_delta_timestamps_factory(fps, tolerance_s)
+ result = check_delta_timestamps(
+ delta_timestamps=slightly_off_delta_timestamps,
+ fps=fps,
+ tolerance_s=tolerance_s,
+ )
+ assert result is True
+
+
+def test_check_delta_timestamps_invalid(invalid_delta_timestamps_factory):
+ fps = 30
+ tolerance_s = 1e-4
+ invalid_delta_timestamps = invalid_delta_timestamps_factory(fps, tolerance_s)
+ with pytest.raises(ValueError):
+ check_delta_timestamps(
+ delta_timestamps=invalid_delta_timestamps,
+ fps=fps,
+ tolerance_s=tolerance_s,
+ )
+
+
+def test_check_delta_timestamps_invalid_no_exception(invalid_delta_timestamps_factory):
+ fps = 30
+ tolerance_s = 1e-4
+ invalid_delta_timestamps = invalid_delta_timestamps_factory(fps, tolerance_s)
+ result = check_delta_timestamps(
+ delta_timestamps=invalid_delta_timestamps,
+ fps=fps,
+ tolerance_s=tolerance_s,
+ raise_value_error=False,
+ )
+ assert result is False
+
+
+def test_check_delta_timestamps_empty():
+ delta_timestamps = {}
+ fps = 30
+ tolerance_s = 1e-4
+ result = check_delta_timestamps(
+ delta_timestamps=delta_timestamps,
+ fps=fps,
+ tolerance_s=tolerance_s,
+ )
+ assert result is True
+
+
+def test_delta_indices(valid_delta_timestamps_factory, delta_indices):
+ fps = 30
+ delta_timestamps = valid_delta_timestamps_factory(fps)
+ expected_delta_indices = delta_indices
+ actual_delta_indices = get_delta_indices(delta_timestamps, fps)
+ assert expected_delta_indices == actual_delta_indices
diff --git a/tests/test_examples.py b/tests/test_examples.py
index 0a6ce4225..f3b7948c7 100644
--- a/tests/test_examples.py
+++ b/tests/test_examples.py
@@ -13,12 +13,15 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-# TODO(aliberts): Mute logging for these tests
+
import io
import subprocess
import sys
from pathlib import Path
+import pytest
+
+from tests.fixtures.constants import DUMMY_REPO_ID
from tests.utils import require_package
@@ -29,6 +32,7 @@ def _find_and_replace(text: str, finds_and_replaces: list[tuple[str, str]]) -> s
return text
+# TODO(aliberts): Remove usage of subprocess calls and patch code with fixtures
def _run_script(path):
subprocess.run([sys.executable, path], check=True)
@@ -38,12 +42,26 @@ def _read_file(path):
return file.read()
-def test_example_1():
+@pytest.mark.skip("TODO Fix and remove subprocess / excec calls")
+def test_example_1(tmp_path, lerobot_dataset_factory):
+ _ = lerobot_dataset_factory(root=tmp_path, repo_id=DUMMY_REPO_ID)
path = "examples/1_load_lerobot_dataset.py"
- _run_script(path)
+ file_contents = _read_file(path)
+ file_contents = _find_and_replace(
+ file_contents,
+ [
+ ('repo_id = "lerobot/pusht"', f'repo_id = "{DUMMY_REPO_ID}"'),
+ (
+ "LeRobotDataset(repo_id",
+ f"LeRobotDataset(repo_id, root='{str(tmp_path)}', local_files_only=True",
+ ),
+ ],
+ )
+ exec(file_contents, {})
assert Path("outputs/examples/1_load_lerobot_dataset/episode_0.mp4").exists()
+@pytest.mark.skip("TODO Fix and remove subprocess / excec calls")
@require_package("gym_pusht")
def test_examples_basic2_basic3_advanced1():
"""
@@ -111,7 +129,8 @@ def test_examples_basic2_basic3_advanced1():
'# pretrained_policy_path = Path("outputs/train/example_pusht_diffusion")',
'pretrained_policy_path = Path("outputs/train/example_pusht_diffusion")',
),
- ('split=f"train[{first_val_frame_index}:]"', 'split="train[30:]"'),
+ ("train_episodes = episodes[:num_train_episodes]", "train_episodes = [0]"),
+ ("val_episodes = episodes[num_train_episodes:]", "val_episodes = [1]"),
("num_workers=4", "num_workers=0"),
('device = torch.device("cuda")', 'device = torch.device("cpu")'),
("batch_size=64", "batch_size=1"),
diff --git a/tests/test_image_transforms.py b/tests/test_image_transforms.py
index ccc40ddfc..8b1a0f4b1 100644
--- a/tests/test_image_transforms.py
+++ b/tests/test_image_transforms.py
@@ -15,15 +15,12 @@
# limitations under the License.
from pathlib import Path
-import numpy as np
import pytest
import torch
-from PIL import Image
from safetensors.torch import load_file
from torchvision.transforms import v2
from torchvision.transforms.v2 import functional as F # noqa: N812
-from lerobot.common.datasets.lerobot_dataset import LeRobotDataset
from lerobot.common.datasets.transforms import RandomSubsetApply, SharpnessJitter, get_image_transforms
from lerobot.common.utils.utils import init_hydra_config, seeded_context
from lerobot.scripts.visualize_image_transforms import visualize_transforms
@@ -33,21 +30,6 @@
DATASET_REPO_ID = "lerobot/aloha_mobile_shrimp"
-def load_png_to_tensor(path: Path):
- return torch.from_numpy(np.array(Image.open(path).convert("RGB"))).permute(2, 0, 1)
-
-
-@pytest.fixture
-def img():
- dataset = LeRobotDataset(DATASET_REPO_ID)
- return dataset[0][dataset.camera_keys[0]]
-
-
-@pytest.fixture
-def img_random():
- return torch.rand(3, 480, 640)
-
-
@pytest.fixture
def color_jitters():
return [
@@ -67,47 +49,54 @@ def default_transforms():
return load_file(ARTIFACT_DIR / "default_transforms.safetensors")
-def test_get_image_transforms_no_transform(img):
+def test_get_image_transforms_no_transform(img_tensor_factory):
+ img_tensor = img_tensor_factory()
tf_actual = get_image_transforms(brightness_min_max=(0.5, 0.5), max_num_transforms=0)
- torch.testing.assert_close(tf_actual(img), img)
+ torch.testing.assert_close(tf_actual(img_tensor), img_tensor)
@pytest.mark.parametrize("min_max", [(0.5, 0.5), (2.0, 2.0)])
-def test_get_image_transforms_brightness(img, min_max):
+def test_get_image_transforms_brightness(img_tensor_factory, min_max):
+ img_tensor = img_tensor_factory()
tf_actual = get_image_transforms(brightness_weight=1.0, brightness_min_max=min_max)
tf_expected = v2.ColorJitter(brightness=min_max)
- torch.testing.assert_close(tf_actual(img), tf_expected(img))
+ torch.testing.assert_close(tf_actual(img_tensor), tf_expected(img_tensor))
@pytest.mark.parametrize("min_max", [(0.5, 0.5), (2.0, 2.0)])
-def test_get_image_transforms_contrast(img, min_max):
+def test_get_image_transforms_contrast(img_tensor_factory, min_max):
+ img_tensor = img_tensor_factory()
tf_actual = get_image_transforms(contrast_weight=1.0, contrast_min_max=min_max)
tf_expected = v2.ColorJitter(contrast=min_max)
- torch.testing.assert_close(tf_actual(img), tf_expected(img))
+ torch.testing.assert_close(tf_actual(img_tensor), tf_expected(img_tensor))
@pytest.mark.parametrize("min_max", [(0.5, 0.5), (2.0, 2.0)])
-def test_get_image_transforms_saturation(img, min_max):
+def test_get_image_transforms_saturation(img_tensor_factory, min_max):
+ img_tensor = img_tensor_factory()
tf_actual = get_image_transforms(saturation_weight=1.0, saturation_min_max=min_max)
tf_expected = v2.ColorJitter(saturation=min_max)
- torch.testing.assert_close(tf_actual(img), tf_expected(img))
+ torch.testing.assert_close(tf_actual(img_tensor), tf_expected(img_tensor))
@pytest.mark.parametrize("min_max", [(-0.25, -0.25), (0.25, 0.25)])
-def test_get_image_transforms_hue(img, min_max):
+def test_get_image_transforms_hue(img_tensor_factory, min_max):
+ img_tensor = img_tensor_factory()
tf_actual = get_image_transforms(hue_weight=1.0, hue_min_max=min_max)
tf_expected = v2.ColorJitter(hue=min_max)
- torch.testing.assert_close(tf_actual(img), tf_expected(img))
+ torch.testing.assert_close(tf_actual(img_tensor), tf_expected(img_tensor))
@pytest.mark.parametrize("min_max", [(0.5, 0.5), (2.0, 2.0)])
-def test_get_image_transforms_sharpness(img, min_max):
+def test_get_image_transforms_sharpness(img_tensor_factory, min_max):
+ img_tensor = img_tensor_factory()
tf_actual = get_image_transforms(sharpness_weight=1.0, sharpness_min_max=min_max)
tf_expected = SharpnessJitter(sharpness=min_max)
- torch.testing.assert_close(tf_actual(img), tf_expected(img))
+ torch.testing.assert_close(tf_actual(img_tensor), tf_expected(img_tensor))
-def test_get_image_transforms_max_num_transforms(img):
+def test_get_image_transforms_max_num_transforms(img_tensor_factory):
+ img_tensor = img_tensor_factory()
tf_actual = get_image_transforms(
brightness_min_max=(0.5, 0.5),
contrast_min_max=(0.5, 0.5),
@@ -125,12 +114,13 @@ def test_get_image_transforms_max_num_transforms(img):
SharpnessJitter(sharpness=(0.5, 0.5)),
]
)
- torch.testing.assert_close(tf_actual(img), tf_expected(img))
+ torch.testing.assert_close(tf_actual(img_tensor), tf_expected(img_tensor))
@require_x86_64_kernel
-def test_get_image_transforms_random_order(img):
+def test_get_image_transforms_random_order(img_tensor_factory):
out_imgs = []
+ img_tensor = img_tensor_factory()
tf = get_image_transforms(
brightness_min_max=(0.5, 0.5),
contrast_min_max=(0.5, 0.5),
@@ -141,13 +131,14 @@ def test_get_image_transforms_random_order(img):
)
with seeded_context(1337):
for _ in range(10):
- out_imgs.append(tf(img))
+ out_imgs.append(tf(img_tensor))
for i in range(1, len(out_imgs)):
with pytest.raises(AssertionError):
torch.testing.assert_close(out_imgs[0], out_imgs[i])
+@pytest.mark.skip("TODO after v2 migration / removing hydra")
@pytest.mark.parametrize(
"transform, min_max_values",
[
@@ -158,21 +149,24 @@ def test_get_image_transforms_random_order(img):
("sharpness", [(0.5, 0.5), (2.0, 2.0)]),
],
)
-def test_backward_compatibility_torchvision(transform, min_max_values, img, single_transforms):
+def test_backward_compatibility_torchvision(img_tensor_factory, transform, min_max_values, single_transforms):
+ img_tensor = img_tensor_factory()
for min_max in min_max_values:
kwargs = {
f"{transform}_weight": 1.0,
f"{transform}_min_max": min_max,
}
tf = get_image_transforms(**kwargs)
- actual = tf(img)
+ actual = tf(img_tensor)
key = f"{transform}_{min_max[0]}_{min_max[1]}"
expected = single_transforms[key]
torch.testing.assert_close(actual, expected)
+@pytest.mark.skip("TODO after v2 migration / removing hydra")
@require_x86_64_kernel
-def test_backward_compatibility_default_config(img, default_transforms):
+def test_backward_compatibility_default_config(img_tensor_factory, default_transforms):
+ img_tensor = img_tensor_factory()
cfg = init_hydra_config(DEFAULT_CONFIG_PATH)
cfg_tf = cfg.training.image_transforms
default_tf = get_image_transforms(
@@ -191,7 +185,7 @@ def test_backward_compatibility_default_config(img, default_transforms):
)
with seeded_context(1337):
- actual = default_tf(img)
+ actual = default_tf(img_tensor)
expected = default_transforms["default"]
@@ -199,33 +193,36 @@ def test_backward_compatibility_default_config(img, default_transforms):
@pytest.mark.parametrize("p", [[0, 1], [1, 0]])
-def test_random_subset_apply_single_choice(p, img):
+def test_random_subset_apply_single_choice(img_tensor_factory, p):
+ img_tensor = img_tensor_factory()
flips = [v2.RandomHorizontalFlip(p=1), v2.RandomVerticalFlip(p=1)]
random_choice = RandomSubsetApply(flips, p=p, n_subset=1, random_order=False)
- actual = random_choice(img)
+ actual = random_choice(img_tensor)
p_horz, _ = p
if p_horz:
- torch.testing.assert_close(actual, F.horizontal_flip(img))
+ torch.testing.assert_close(actual, F.horizontal_flip(img_tensor))
else:
- torch.testing.assert_close(actual, F.vertical_flip(img))
+ torch.testing.assert_close(actual, F.vertical_flip(img_tensor))
-def test_random_subset_apply_random_order(img):
+def test_random_subset_apply_random_order(img_tensor_factory):
+ img_tensor = img_tensor_factory()
flips = [v2.RandomHorizontalFlip(p=1), v2.RandomVerticalFlip(p=1)]
random_order = RandomSubsetApply(flips, p=[0.5, 0.5], n_subset=2, random_order=True)
# We can't really check whether the transforms are actually applied in random order. However,
# horizontal and vertical flip are commutative. Meaning, even under the assumption that the transform
# applies them in random order, we can use a fixed order to compute the expected value.
- actual = random_order(img)
- expected = v2.Compose(flips)(img)
+ actual = random_order(img_tensor)
+ expected = v2.Compose(flips)(img_tensor)
torch.testing.assert_close(actual, expected)
-def test_random_subset_apply_valid_transforms(color_jitters, img):
+def test_random_subset_apply_valid_transforms(img_tensor_factory, color_jitters):
+ img_tensor = img_tensor_factory()
transform = RandomSubsetApply(color_jitters)
- output = transform(img)
- assert output.shape == img.shape
+ output = transform(img_tensor)
+ assert output.shape == img_tensor.shape
def test_random_subset_apply_probability_length_mismatch(color_jitters):
@@ -239,16 +236,18 @@ def test_random_subset_apply_invalid_n_subset(color_jitters, n_subset):
RandomSubsetApply(color_jitters, n_subset=n_subset)
-def test_sharpness_jitter_valid_range_tuple(img):
+def test_sharpness_jitter_valid_range_tuple(img_tensor_factory):
+ img_tensor = img_tensor_factory()
tf = SharpnessJitter((0.1, 2.0))
- output = tf(img)
- assert output.shape == img.shape
+ output = tf(img_tensor)
+ assert output.shape == img_tensor.shape
-def test_sharpness_jitter_valid_range_float(img):
+def test_sharpness_jitter_valid_range_float(img_tensor_factory):
+ img_tensor = img_tensor_factory()
tf = SharpnessJitter(0.5)
- output = tf(img)
- assert output.shape == img.shape
+ output = tf(img_tensor)
+ assert output.shape == img_tensor.shape
def test_sharpness_jitter_invalid_range_min_negative():
@@ -261,6 +260,7 @@ def test_sharpness_jitter_invalid_range_max_smaller():
SharpnessJitter((2.0, 0.1))
+@pytest.mark.skip("TODO after v2 migration / removing hydra")
@pytest.mark.parametrize(
"repo_id, n_examples",
[
diff --git a/tests/test_image_writer.py b/tests/test_image_writer.py
new file mode 100644
index 000000000..f51e86b47
--- /dev/null
+++ b/tests/test_image_writer.py
@@ -0,0 +1,359 @@
+import queue
+import time
+from multiprocessing import queues
+from unittest.mock import MagicMock, patch
+
+import numpy as np
+import pytest
+from PIL import Image
+
+from lerobot.common.datasets.image_writer import (
+ AsyncImageWriter,
+ image_array_to_image,
+ safe_stop_image_writer,
+ write_image,
+)
+
+DUMMY_IMAGE = "test_image.png"
+
+
+def test_init_threading():
+ writer = AsyncImageWriter(num_processes=0, num_threads=2)
+ try:
+ assert writer.num_processes == 0
+ assert writer.num_threads == 2
+ assert isinstance(writer.queue, queue.Queue)
+ assert len(writer.threads) == 2
+ assert len(writer.processes) == 0
+ assert all(t.is_alive() for t in writer.threads)
+ finally:
+ writer.stop()
+
+
+def test_init_multiprocessing():
+ writer = AsyncImageWriter(num_processes=2, num_threads=2)
+ try:
+ assert writer.num_processes == 2
+ assert writer.num_threads == 2
+ assert isinstance(writer.queue, queues.JoinableQueue)
+ assert len(writer.threads) == 0
+ assert len(writer.processes) == 2
+ assert all(p.is_alive() for p in writer.processes)
+ finally:
+ writer.stop()
+
+
+def test_zero_threads():
+ with pytest.raises(ValueError):
+ AsyncImageWriter(num_processes=0, num_threads=0)
+
+
+def test_image_array_to_image_rgb(img_array_factory):
+ img_array = img_array_factory(100, 100)
+ result_image = image_array_to_image(img_array)
+ assert isinstance(result_image, Image.Image)
+ assert result_image.size == (100, 100)
+ assert result_image.mode == "RGB"
+
+
+def test_image_array_to_image_pytorch_format(img_array_factory):
+ img_array = img_array_factory(100, 100).transpose(2, 0, 1)
+ result_image = image_array_to_image(img_array)
+ assert isinstance(result_image, Image.Image)
+ assert result_image.size == (100, 100)
+ assert result_image.mode == "RGB"
+
+
+@pytest.mark.skip("TODO: implement")
+def test_image_array_to_image_single_channel(img_array_factory):
+ img_array = img_array_factory(channels=1)
+ result_image = image_array_to_image(img_array)
+ assert isinstance(result_image, Image.Image)
+ assert result_image.size == (100, 100)
+ assert result_image.mode == "L"
+
+
+def test_image_array_to_image_float_array(img_array_factory):
+ img_array = img_array_factory(dtype=np.float32)
+ result_image = image_array_to_image(img_array)
+ assert isinstance(result_image, Image.Image)
+ assert result_image.size == (100, 100)
+ assert result_image.mode == "RGB"
+ assert np.array(result_image).dtype == np.uint8
+
+
+def test_image_array_to_image_out_of_bounds_float():
+ # Float array with values out of [0, 1]
+ img_array = np.random.uniform(-1, 2, size=(100, 100, 3)).astype(np.float32)
+ result_image = image_array_to_image(img_array)
+ assert isinstance(result_image, Image.Image)
+ assert result_image.size == (100, 100)
+ assert result_image.mode == "RGB"
+ assert np.array(result_image).dtype == np.uint8
+ assert np.array(result_image).min() >= 0 and np.array(result_image).max() <= 255
+
+
+def test_write_image_numpy(tmp_path, img_array_factory):
+ image_array = img_array_factory()
+ fpath = tmp_path / DUMMY_IMAGE
+ write_image(image_array, fpath)
+ assert fpath.exists()
+ saved_image = np.array(Image.open(fpath))
+ assert np.array_equal(image_array, saved_image)
+
+
+def test_write_image_image(tmp_path, img_factory):
+ image_pil = img_factory()
+ fpath = tmp_path / DUMMY_IMAGE
+ write_image(image_pil, fpath)
+ assert fpath.exists()
+ saved_image = Image.open(fpath)
+ assert list(saved_image.getdata()) == list(image_pil.getdata())
+ assert np.array_equal(image_pil, saved_image)
+
+
+def test_write_image_exception(tmp_path):
+ image_array = "invalid data"
+ fpath = tmp_path / DUMMY_IMAGE
+ with patch("builtins.print") as mock_print:
+ write_image(image_array, fpath)
+ mock_print.assert_called()
+ assert not fpath.exists()
+
+
+def test_save_image_numpy(tmp_path, img_array_factory):
+ writer = AsyncImageWriter()
+ try:
+ image_array = img_array_factory()
+ fpath = tmp_path / DUMMY_IMAGE
+ fpath.parent.mkdir(parents=True, exist_ok=True)
+ writer.save_image(image_array, fpath)
+ writer.wait_until_done()
+ assert fpath.exists()
+ saved_image = np.array(Image.open(fpath))
+ assert np.array_equal(image_array, saved_image)
+ finally:
+ writer.stop()
+
+
+def test_save_image_numpy_multiprocessing(tmp_path, img_array_factory):
+ writer = AsyncImageWriter(num_processes=2, num_threads=2)
+ try:
+ image_array = img_array_factory()
+ fpath = tmp_path / DUMMY_IMAGE
+ writer.save_image(image_array, fpath)
+ writer.wait_until_done()
+ assert fpath.exists()
+ saved_image = np.array(Image.open(fpath))
+ assert np.array_equal(image_array, saved_image)
+ finally:
+ writer.stop()
+
+
+def test_save_image_torch(tmp_path, img_tensor_factory):
+ writer = AsyncImageWriter()
+ try:
+ image_tensor = img_tensor_factory()
+ fpath = tmp_path / DUMMY_IMAGE
+ fpath.parent.mkdir(parents=True, exist_ok=True)
+ writer.save_image(image_tensor, fpath)
+ writer.wait_until_done()
+ assert fpath.exists()
+ saved_image = np.array(Image.open(fpath))
+ expected_image = (image_tensor.permute(1, 2, 0).cpu().numpy() * 255).astype(np.uint8)
+ assert np.array_equal(expected_image, saved_image)
+ finally:
+ writer.stop()
+
+
+def test_save_image_torch_multiprocessing(tmp_path, img_tensor_factory):
+ writer = AsyncImageWriter(num_processes=2, num_threads=2)
+ try:
+ image_tensor = img_tensor_factory()
+ fpath = tmp_path / DUMMY_IMAGE
+ writer.save_image(image_tensor, fpath)
+ writer.wait_until_done()
+ assert fpath.exists()
+ saved_image = np.array(Image.open(fpath))
+ expected_image = (image_tensor.permute(1, 2, 0).cpu().numpy() * 255).astype(np.uint8)
+ assert np.array_equal(expected_image, saved_image)
+ finally:
+ writer.stop()
+
+
+def test_save_image_pil(tmp_path, img_factory):
+ writer = AsyncImageWriter()
+ try:
+ image_pil = img_factory()
+ fpath = tmp_path / DUMMY_IMAGE
+ fpath.parent.mkdir(parents=True, exist_ok=True)
+ writer.save_image(image_pil, fpath)
+ writer.wait_until_done()
+ assert fpath.exists()
+ saved_image = Image.open(fpath)
+ assert list(saved_image.getdata()) == list(image_pil.getdata())
+ finally:
+ writer.stop()
+
+
+def test_save_image_pil_multiprocessing(tmp_path, img_factory):
+ writer = AsyncImageWriter(num_processes=2, num_threads=2)
+ try:
+ image_pil = img_factory()
+ fpath = tmp_path / DUMMY_IMAGE
+ writer.save_image(image_pil, fpath)
+ writer.wait_until_done()
+ assert fpath.exists()
+ saved_image = Image.open(fpath)
+ assert list(saved_image.getdata()) == list(image_pil.getdata())
+ finally:
+ writer.stop()
+
+
+def test_save_image_invalid_data(tmp_path):
+ writer = AsyncImageWriter()
+ try:
+ image_array = "invalid data"
+ fpath = tmp_path / DUMMY_IMAGE
+ fpath.parent.mkdir(parents=True, exist_ok=True)
+ with patch("builtins.print") as mock_print:
+ writer.save_image(image_array, fpath)
+ writer.wait_until_done()
+ mock_print.assert_called()
+ assert not fpath.exists()
+ finally:
+ writer.stop()
+
+
+def test_save_image_after_stop(tmp_path, img_array_factory):
+ writer = AsyncImageWriter()
+ writer.stop()
+ image_array = img_array_factory()
+ fpath = tmp_path / DUMMY_IMAGE
+ writer.save_image(image_array, fpath)
+ time.sleep(1)
+ assert not fpath.exists()
+
+
+def test_stop():
+ writer = AsyncImageWriter(num_processes=0, num_threads=2)
+ writer.stop()
+ assert not any(t.is_alive() for t in writer.threads)
+
+
+def test_stop_multiprocessing():
+ writer = AsyncImageWriter(num_processes=2, num_threads=2)
+ writer.stop()
+ assert not any(p.is_alive() for p in writer.processes)
+
+
+def test_multiple_stops():
+ writer = AsyncImageWriter()
+ writer.stop()
+ writer.stop() # Should not raise an exception
+ assert not any(t.is_alive() for t in writer.threads)
+
+
+def test_multiple_stops_multiprocessing():
+ writer = AsyncImageWriter(num_processes=2, num_threads=2)
+ writer.stop()
+ writer.stop() # Should not raise an exception
+ assert not any(t.is_alive() for t in writer.threads)
+
+
+def test_wait_until_done(tmp_path, img_array_factory):
+ writer = AsyncImageWriter(num_processes=0, num_threads=4)
+ try:
+ num_images = 100
+ image_arrays = [img_array_factory(height=500, width=500) for _ in range(num_images)]
+ fpaths = [tmp_path / f"frame_{i:06d}.png" for i in range(num_images)]
+ for image_array, fpath in zip(image_arrays, fpaths, strict=True):
+ fpath.parent.mkdir(parents=True, exist_ok=True)
+ writer.save_image(image_array, fpath)
+ writer.wait_until_done()
+ for i, fpath in enumerate(fpaths):
+ assert fpath.exists()
+ saved_image = np.array(Image.open(fpath))
+ assert np.array_equal(saved_image, image_arrays[i])
+ finally:
+ writer.stop()
+
+
+def test_wait_until_done_multiprocessing(tmp_path, img_array_factory):
+ writer = AsyncImageWriter(num_processes=2, num_threads=2)
+ try:
+ num_images = 100
+ image_arrays = [img_array_factory() for _ in range(num_images)]
+ fpaths = [tmp_path / f"frame_{i:06d}.png" for i in range(num_images)]
+ for image_array, fpath in zip(image_arrays, fpaths, strict=True):
+ fpath.parent.mkdir(parents=True, exist_ok=True)
+ writer.save_image(image_array, fpath)
+ writer.wait_until_done()
+ for i, fpath in enumerate(fpaths):
+ assert fpath.exists()
+ saved_image = np.array(Image.open(fpath))
+ assert np.array_equal(saved_image, image_arrays[i])
+ finally:
+ writer.stop()
+
+
+def test_exception_handling(tmp_path, img_array_factory):
+ writer = AsyncImageWriter()
+ try:
+ image_array = img_array_factory()
+ with (
+ patch.object(writer.queue, "put", side_effect=queue.Full("Queue is full")),
+ pytest.raises(queue.Full) as exc_info,
+ ):
+ writer.save_image(image_array, tmp_path / "test.png")
+ assert str(exc_info.value) == "Queue is full"
+ finally:
+ writer.stop()
+
+
+def test_with_different_image_formats(tmp_path, img_array_factory):
+ writer = AsyncImageWriter()
+ try:
+ image_array = img_array_factory()
+ formats = ["png", "jpeg", "bmp"]
+ for fmt in formats:
+ fpath = tmp_path / f"test_image.{fmt}"
+ write_image(image_array, fpath)
+ assert fpath.exists()
+ finally:
+ writer.stop()
+
+
+def test_safe_stop_image_writer_decorator():
+ class MockDataset:
+ def __init__(self):
+ self.image_writer = MagicMock(spec=AsyncImageWriter)
+
+ @safe_stop_image_writer
+ def function_that_raises_exception(dataset=None):
+ raise Exception("Test exception")
+
+ dataset = MockDataset()
+
+ with pytest.raises(Exception) as exc_info:
+ function_that_raises_exception(dataset=dataset)
+
+ assert str(exc_info.value) == "Test exception"
+ dataset.image_writer.stop.assert_called_once()
+
+
+def test_main_process_time(tmp_path, img_tensor_factory):
+ writer = AsyncImageWriter()
+ try:
+ image_tensor = img_tensor_factory()
+ fpath = tmp_path / DUMMY_IMAGE
+ start_time = time.perf_counter()
+ writer.save_image(image_tensor, fpath)
+ end_time = time.perf_counter()
+ time_spent = end_time - start_time
+ # Might need to adjust this threshold depending on hardware
+ assert time_spent < 0.01, f"Main process time exceeded threshold: {time_spent}s"
+ writer.wait_until_done()
+ assert fpath.exists()
+ finally:
+ writer.stop()
diff --git a/tests/test_motors.py b/tests/test_motors.py
index 48c2e8d8d..2f668926c 100644
--- a/tests/test_motors.py
+++ b/tests/test_motors.py
@@ -1,11 +1,23 @@
"""
-Tests meant to be used locally and launched manually.
+Tests for physical motors and their mocked versions.
+If the physical motors are not connected to the computer, or not working,
+the test will be skipped.
-Example usage:
+Example of running a specific test:
```bash
pytest -sx tests/test_motors.py::test_find_port
pytest -sx tests/test_motors.py::test_motors_bus
```
+
+Example of running test on real dynamixel motors connected to the computer:
+```bash
+pytest -sx 'tests/test_motors.py::test_motors_bus[dynamixel-False]'
+```
+
+Example of running test on a mocked version of dynamixel motors:
+```bash
+pytest -sx 'tests/test_motors.py::test_motors_bus[dynamixel-True]'
+```
"""
# TODO(rcadene): measure fps in nightly?
@@ -18,55 +30,63 @@
import numpy as np
import pytest
-from lerobot import available_robots
-from lerobot.common.robot_devices.motors.utils import MotorsBus
-from lerobot.common.robot_devices.robots.factory import make_robot
from lerobot.common.robot_devices.utils import RobotDeviceAlreadyConnectedError, RobotDeviceNotConnectedError
-from lerobot.common.utils.utils import init_hydra_config
-from tests.utils import ROBOT_CONFIG_PATH_TEMPLATE, require_robot
-
+from lerobot.scripts.find_motors_bus_port import find_port
+from tests.utils import TEST_MOTOR_TYPES, make_motors_bus, require_motor
+
+
+@pytest.mark.parametrize("motor_type, mock", TEST_MOTOR_TYPES)
+@require_motor
+def test_find_port(request, motor_type, mock):
+ if mock:
+ request.getfixturevalue("patch_builtins_input")
+ with pytest.raises(OSError):
+ find_port()
+ else:
+ find_port()
+
+
+@pytest.mark.parametrize("motor_type, mock", TEST_MOTOR_TYPES)
+@require_motor
+def test_configure_motors_all_ids_1(request, motor_type, mock):
+ if mock:
+ request.getfixturevalue("patch_builtins_input")
+
+ if motor_type == "dynamixel":
+ # see X_SERIES_BAUDRATE_TABLE
+ smaller_baudrate = 9_600
+ smaller_baudrate_value = 0
+ elif motor_type == "feetech":
+ # see SCS_SERIES_BAUDRATE_TABLE
+ smaller_baudrate = 19_200
+ smaller_baudrate_value = 7
+ else:
+ raise ValueError(motor_type)
-def make_motors_bus(robot_type: str) -> MotorsBus:
- # Instantiate a robot and return one of its leader arms
- config_path = ROBOT_CONFIG_PATH_TEMPLATE.format(robot=robot_type)
- robot_cfg = init_hydra_config(config_path)
- robot = make_robot(robot_cfg)
- first_bus_name = list(robot.leader_arms.keys())[0]
- motors_bus = robot.leader_arms[first_bus_name]
- return motors_bus
-
-
-@pytest.mark.parametrize("robot_type", available_robots)
-@require_robot
-def test_find_port(request, robot_type):
- from lerobot.common.robot_devices.motors.dynamixel import find_port
-
- find_port()
-
-
-@pytest.mark.parametrize("robot_type", available_robots)
-@require_robot
-def test_configure_motors_all_ids_1(request, robot_type):
input("Are you sure you want to re-configure the motors? Press enter to continue...")
# This test expect the configuration was already correct.
- motors_bus = make_motors_bus(robot_type)
+ motors_bus = make_motors_bus(motor_type, mock=mock)
motors_bus.connect()
- motors_bus.write("Baud_Rate", [0] * len(motors_bus.motors))
- motors_bus.set_bus_baudrate(9_600)
+ motors_bus.write("Baud_Rate", [smaller_baudrate_value] * len(motors_bus.motors))
+
+ motors_bus.set_bus_baudrate(smaller_baudrate)
motors_bus.write("ID", [1] * len(motors_bus.motors))
del motors_bus
# Test configure
- motors_bus = make_motors_bus(robot_type)
+ motors_bus = make_motors_bus(motor_type, mock=mock)
motors_bus.connect()
assert motors_bus.are_motors_configured()
del motors_bus
-@pytest.mark.parametrize("robot_type", available_robots)
-@require_robot
-def test_motors_bus(request, robot_type):
- motors_bus = make_motors_bus(robot_type)
+@pytest.mark.parametrize("motor_type, mock", TEST_MOTOR_TYPES)
+@require_motor
+def test_motors_bus(request, motor_type, mock):
+ if mock:
+ request.getfixturevalue("patch_builtins_input")
+
+ motors_bus = make_motors_bus(motor_type, mock=mock)
# Test reading and writting before connecting raises an error
with pytest.raises(RobotDeviceNotConnectedError):
@@ -80,7 +100,7 @@ def test_motors_bus(request, robot_type):
del motors_bus
# Test connecting
- motors_bus = make_motors_bus(robot_type)
+ motors_bus = make_motors_bus(motor_type, mock=mock)
motors_bus.connect()
# Test connecting twice raises an error
diff --git a/tests/test_online_buffer.py b/tests/test_online_buffer.py
index 37000e4fb..092cd3d08 100644
--- a/tests/test_online_buffer.py
+++ b/tests/test_online_buffer.py
@@ -19,11 +19,8 @@
import numpy as np
import pytest
import torch
-from datasets import Dataset
-from lerobot.common.datasets.lerobot_dataset import LeRobotDataset
from lerobot.common.datasets.online_buffer import OnlineBuffer, compute_sampler_weights
-from lerobot.common.datasets.utils import hf_transform_to_torch
# Some constants for OnlineBuffer tests.
data_key = "data"
@@ -212,29 +209,17 @@ def test_delta_timestamps_outside_tolerance_outside_episode_range():
# Arbitrarily set small dataset sizes, making sure to have uneven sizes.
-@pytest.mark.parametrize("offline_dataset_size", [0, 6])
+@pytest.mark.parametrize("offline_dataset_size", [1, 6])
@pytest.mark.parametrize("online_dataset_size", [0, 4])
@pytest.mark.parametrize("online_sampling_ratio", [0.0, 1.0])
def test_compute_sampler_weights_trivial(
- offline_dataset_size: int, online_dataset_size: int, online_sampling_ratio: float
+ lerobot_dataset_factory,
+ tmp_path,
+ offline_dataset_size: int,
+ online_dataset_size: int,
+ online_sampling_ratio: float,
):
- # Pass/skip the test if both datasets sizes are zero.
- if offline_dataset_size + online_dataset_size == 0:
- return
- # Create spoof offline dataset.
- offline_dataset = LeRobotDataset.from_preloaded(
- hf_dataset=Dataset.from_dict({"data": list(range(offline_dataset_size))})
- )
- offline_dataset.hf_dataset.set_transform(hf_transform_to_torch)
- if offline_dataset_size == 0:
- offline_dataset.episode_data_index = {}
- else:
- # Set up an episode_data_index with at least two episodes.
- offline_dataset.episode_data_index = {
- "from": torch.tensor([0, offline_dataset_size // 2]),
- "to": torch.tensor([offline_dataset_size // 2, offline_dataset_size]),
- }
- # Create spoof online datset.
+ offline_dataset = lerobot_dataset_factory(tmp_path, total_episodes=1, total_frames=offline_dataset_size)
online_dataset, _ = make_new_buffer()
if online_dataset_size > 0:
online_dataset.add_data(
@@ -254,16 +239,9 @@ def test_compute_sampler_weights_trivial(
assert torch.allclose(weights, expected_weights)
-def test_compute_sampler_weights_nontrivial_ratio():
+def test_compute_sampler_weights_nontrivial_ratio(lerobot_dataset_factory, tmp_path):
# Arbitrarily set small dataset sizes, making sure to have uneven sizes.
- # Create spoof offline dataset.
- offline_dataset = LeRobotDataset.from_preloaded(hf_dataset=Dataset.from_dict({"data": list(range(4))}))
- offline_dataset.hf_dataset.set_transform(hf_transform_to_torch)
- offline_dataset.episode_data_index = {
- "from": torch.tensor([0, 2]),
- "to": torch.tensor([2, 4]),
- }
- # Create spoof online datset.
+ offline_dataset = lerobot_dataset_factory(tmp_path, total_episodes=1, total_frames=4)
online_dataset, _ = make_new_buffer()
online_dataset.add_data(make_spoof_data_frames(n_episodes=4, n_frames_per_episode=2))
online_sampling_ratio = 0.8
@@ -275,16 +253,9 @@ def test_compute_sampler_weights_nontrivial_ratio():
)
-def test_compute_sampler_weights_nontrivial_ratio_and_drop_last_n():
+def test_compute_sampler_weights_nontrivial_ratio_and_drop_last_n(lerobot_dataset_factory, tmp_path):
# Arbitrarily set small dataset sizes, making sure to have uneven sizes.
- # Create spoof offline dataset.
- offline_dataset = LeRobotDataset.from_preloaded(hf_dataset=Dataset.from_dict({"data": list(range(4))}))
- offline_dataset.hf_dataset.set_transform(hf_transform_to_torch)
- offline_dataset.episode_data_index = {
- "from": torch.tensor([0]),
- "to": torch.tensor([4]),
- }
- # Create spoof online datset.
+ offline_dataset = lerobot_dataset_factory(tmp_path, total_episodes=1, total_frames=4)
online_dataset, _ = make_new_buffer()
online_dataset.add_data(make_spoof_data_frames(n_episodes=4, n_frames_per_episode=2))
weights = compute_sampler_weights(
@@ -295,18 +266,9 @@ def test_compute_sampler_weights_nontrivial_ratio_and_drop_last_n():
)
-def test_compute_sampler_weights_drop_n_last_frames():
+def test_compute_sampler_weights_drop_n_last_frames(lerobot_dataset_factory, tmp_path):
"""Note: test copied from test_sampler."""
- data_dict = {
- "timestamp": [0, 0.1],
- "index": [0, 1],
- "episode_index": [0, 0],
- "frame_index": [0, 1],
- }
- offline_dataset = LeRobotDataset.from_preloaded(hf_dataset=Dataset.from_dict(data_dict))
- offline_dataset.hf_dataset.set_transform(hf_transform_to_torch)
- offline_dataset.episode_data_index = {"from": torch.tensor([0]), "to": torch.tensor([2])}
-
+ offline_dataset = lerobot_dataset_factory(tmp_path, total_episodes=1, total_frames=2)
online_dataset, _ = make_new_buffer()
online_dataset.add_data(make_spoof_data_frames(n_episodes=4, n_frames_per_episode=2))
diff --git a/tests/test_policies.py b/tests/test_policies.py
index d90f00716..ae3567433 100644
--- a/tests/test_policies.py
+++ b/tests/test_policies.py
@@ -50,7 +50,7 @@ def test_get_policy_and_config_classes(policy_name: str):
assert issubclass(config_cls, inspect.signature(policy_cls.__init__).parameters["config"].annotation)
-# TODO(aliberts): refactor using lerobot/__init__.py variables
+@pytest.mark.skip("TODO after v2 migration / removing hydra")
@pytest.mark.parametrize(
"env_name,policy_name,extra_overrides",
[
@@ -136,7 +136,7 @@ def test_policy(env_name, policy_name, extra_overrides):
# Check that we can make the policy object.
dataset = make_dataset(cfg)
- policy = make_policy(hydra_cfg=cfg, dataset_stats=dataset.stats)
+ policy = make_policy(hydra_cfg=cfg, dataset_stats=dataset.meta.stats)
# Check that the policy follows the required protocol.
assert isinstance(
policy, Policy
@@ -195,6 +195,7 @@ def test_policy(env_name, policy_name, extra_overrides):
env.step(action)
+@pytest.mark.skip("TODO after v2 migration / removing hydra")
def test_act_backbone_lr():
"""
Test that the ACT policy can be instantiated with a different learning rate for the backbone.
@@ -213,7 +214,7 @@ def test_act_backbone_lr():
assert cfg.training.lr_backbone == 0.001
dataset = make_dataset(cfg)
- policy = make_policy(hydra_cfg=cfg, dataset_stats=dataset.stats)
+ policy = make_policy(hydra_cfg=cfg, dataset_stats=dataset.meta.stats)
optimizer, _ = make_optimizer_and_scheduler(cfg, policy)
assert len(optimizer.param_groups) == 2
assert optimizer.param_groups[0]["lr"] == cfg.training.lr
@@ -351,6 +352,7 @@ def test_normalize(insert_temporal_dim):
unnormalize(output_batch)
+@pytest.mark.skip("TODO after v2 migration / removing hydra")
@pytest.mark.parametrize(
"env_name, policy_name, extra_overrides, file_name_extra",
[
@@ -367,8 +369,7 @@ def test_normalize(insert_temporal_dim):
),
("aloha", "act", ["policy.n_action_steps=10"], ""),
("aloha", "act", ["policy.n_action_steps=1000", "policy.chunk_size=1000"], "_1000_steps"),
- ("dora_aloha_real", "act_real", ["policy.n_action_steps=10"], ""),
- ("dora_aloha_real", "act_real_no_state", ["policy.n_action_steps=10"], ""),
+ ("dora_aloha_real", "act_aloha_real", ["policy.n_action_steps=10"], ""),
],
)
# As artifacts have been generated on an x86_64 kernel, this test won't
@@ -382,7 +383,7 @@ def test_backward_compatibility(env_name, policy_name, extra_overrides, file_nam
include a report on what changed and how that affected the outputs.
2. Go to the `if __name__ == "__main__"` block of `tests/scripts/save_policy_to_safetensors.py` and
add the policies you want to update the test artifacts for.
- 3. Run `DATA_DIR=tests/data python tests/scripts/save_policy_to_safetensors.py`. The test artifact
+ 3. Run `python tests/scripts/save_policy_to_safetensors.py`. The test artifact
should be updated.
4. Check that this test now passes.
5. Remember to restore `tests/scripts/save_policy_to_safetensors.py` to its original state.
diff --git a/tests/test_push_dataset_to_hub.py b/tests/test_push_dataset_to_hub.py
index f6725f871..ff630ab66 100644
--- a/tests/test_push_dataset_to_hub.py
+++ b/tests/test_push_dataset_to_hub.py
@@ -5,7 +5,7 @@
Example to run backward compatiblity tests locally:
```
-DATA_DIR=tests/data python -m pytest --run-skipped tests/test_push_dataset_to_hub.py::test_push_dataset_to_hub_pusht_backward_compatibility
+python -m pytest --run-skipped tests/test_push_dataset_to_hub.py::test_push_dataset_to_hub_pusht_backward_compatibility
```
"""
@@ -250,6 +250,7 @@ def test_push_dataset_to_hub_out_dir_force_override_false(tmpdir):
)
+@pytest.mark.skip("TODO after v2 migration / removing hydra")
@pytest.mark.parametrize(
"required_packages, raw_format, repo_id, make_test_data",
[
@@ -329,7 +330,7 @@ def test_push_dataset_to_hub_format(required_packages, tmpdir, raw_format, repo_
],
)
@pytest.mark.skip(
- "Not compatible with our CI since it downloads raw datasets. Run with `DATA_DIR=tests/data python -m pytest --run-skipped tests/test_push_dataset_to_hub.py::test_push_dataset_to_hub_pusht_backward_compatibility`"
+ "Not compatible with our CI since it downloads raw datasets. Run with `python -m pytest --run-skipped tests/test_push_dataset_to_hub.py::test_push_dataset_to_hub_pusht_backward_compatibility`"
)
def test_push_dataset_to_hub_pusht_backward_compatibility(tmpdir, raw_format, repo_id):
_, dataset_id = repo_id.split("/")
diff --git a/tests/test_robots.py b/tests/test_robots.py
index 4ce3805ee..05966ff15 100644
--- a/tests/test_robots.py
+++ b/tests/test_robots.py
@@ -1,10 +1,26 @@
"""
-Tests meant to be used locally and launched manually.
+Tests for physical robots and their mocked versions.
+If the physical robots are not connected to the computer, or not working,
+the test will be skipped.
-Example usage:
+Example of running a specific test:
```bash
pytest -sx tests/test_robots.py::test_robot
```
+
+Example of running test on real robots connected to the computer:
+```bash
+pytest -sx 'tests/test_robots.py::test_robot[koch-False]'
+pytest -sx 'tests/test_robots.py::test_robot[koch_bimanual-False]'
+pytest -sx 'tests/test_robots.py::test_robot[aloha-False]'
+```
+
+Example of running test on a mocked version of robots:
+```bash
+pytest -sx 'tests/test_robots.py::test_robot[koch-True]'
+pytest -sx 'tests/test_robots.py::test_robot[koch_bimanual-True]'
+pytest -sx 'tests/test_robots.py::test_robot[aloha-True]'
+```
"""
from pathlib import Path
@@ -12,41 +28,42 @@
import pytest
import torch
-from lerobot import available_robots
-from lerobot.common.robot_devices.robots.factory import make_robot as make_robot_from_cfg
-from lerobot.common.robot_devices.robots.utils import Robot
+from lerobot.common.robot_devices.robots.manipulator import ManipulatorRobot
from lerobot.common.robot_devices.utils import RobotDeviceAlreadyConnectedError, RobotDeviceNotConnectedError
-from lerobot.common.utils.utils import init_hydra_config
-from tests.utils import ROBOT_CONFIG_PATH_TEMPLATE, require_robot
-
+from tests.utils import TEST_ROBOT_TYPES, make_robot, mock_calibration_dir, require_robot
-def make_robot(robot_type: str, overrides: list[str] | None = None) -> Robot:
- config_path = ROBOT_CONFIG_PATH_TEMPLATE.format(robot=robot_type)
- robot_cfg = init_hydra_config(config_path, overrides)
- robot = make_robot_from_cfg(robot_cfg)
- return robot
-
-@pytest.mark.parametrize("robot_type", available_robots)
+@pytest.mark.parametrize("robot_type, mock", TEST_ROBOT_TYPES)
@require_robot
-def test_robot(tmpdir, request, robot_type):
+def test_robot(tmpdir, request, robot_type, mock):
# TODO(rcadene): measure fps in nightly?
# TODO(rcadene): test logs
# TODO(rcadene): add compatibility with other robots
- from lerobot.common.robot_devices.robots.manipulator import ManipulatorRobot
-
- # Save calibration preset
- tmpdir = Path(tmpdir)
- calibration_dir = tmpdir / robot_type
+ robot_kwargs = {"robot_type": robot_type}
+
+ if robot_type == "aloha" and mock:
+ # To simplify unit test, we do not rerun manual calibration for Aloha mock=True.
+ # Instead, we use the files from '.cache/calibration/aloha_default'
+ overrides_calibration_dir = None
+ else:
+ if mock:
+ request.getfixturevalue("patch_builtins_input")
+
+ # Create an empty calibration directory to trigger manual calibration
+ tmpdir = Path(tmpdir)
+ calibration_dir = tmpdir / robot_type
+ overrides_calibration_dir = [f"calibration_dir={calibration_dir}"]
+ mock_calibration_dir(calibration_dir)
+ robot_kwargs["calibration_dir"] = calibration_dir
# Test connecting without devices raises an error
- robot = ManipulatorRobot()
+ robot = ManipulatorRobot(**robot_kwargs)
with pytest.raises(ValueError):
robot.connect()
del robot
# Test using robot before connecting raises an error
- robot = ManipulatorRobot()
+ robot = ManipulatorRobot(**robot_kwargs)
with pytest.raises(RobotDeviceNotConnectedError):
robot.teleop_step()
with pytest.raises(RobotDeviceNotConnectedError):
@@ -61,21 +78,23 @@ def test_robot(tmpdir, request, robot_type):
# Test deleting the object without connecting first
del robot
- # Test connecting
- robot = make_robot(robot_type, overrides=[f"calibration_dir={calibration_dir}"])
- robot.connect() # run the manual calibration precedure
+ # Test connecting (triggers manual calibration)
+ robot = make_robot(robot_type, overrides=overrides_calibration_dir, mock=mock)
+ robot.connect()
assert robot.is_connected
# Test connecting twice raises an error
with pytest.raises(RobotDeviceAlreadyConnectedError):
robot.connect()
- # Test disconnecting with `__del__`
- del robot
+ # TODO(rcadene, aliberts): Test disconnecting with `__del__` instead of `disconnect`
+ # del robot
+ robot.disconnect()
# Test teleop can run
- robot = make_robot(robot_type, overrides=[f"calibration_dir={calibration_dir}"])
- robot.calibration_dir = calibration_dir
+ robot = make_robot(robot_type, overrides=overrides_calibration_dir, mock=mock)
+ if overrides_calibration_dir is not None:
+ robot.calibration_dir = calibration_dir
robot.connect()
robot.teleop_step()
@@ -108,6 +127,7 @@ def test_robot(tmpdir, request, robot_type):
# TODO(rcadene): skipping image for now as it's challenging to assess equality between two consecutive frames
continue
assert torch.allclose(captured_observation[name], observation[name], atol=1)
+ assert captured_observation[name].shape == observation[name].shape
# Test send_action can run
robot.send_action(action["action"])
@@ -121,4 +141,3 @@ def test_robot(tmpdir, request, robot_type):
assert not robot.leader_arms[name].is_connected
for name in robot.cameras:
assert not robot.cameras[name].is_connected
- del robot
diff --git a/tests/test_sampler.py b/tests/test_sampler.py
index 635e7f117..ee143f376 100644
--- a/tests/test_sampler.py
+++ b/tests/test_sampler.py
@@ -15,9 +15,9 @@
# limitations under the License.
from datasets import Dataset
+from lerobot.common.datasets.push_dataset_to_hub.utils import calculate_episode_data_index
from lerobot.common.datasets.sampler import EpisodeAwareSampler
from lerobot.common.datasets.utils import (
- calculate_episode_data_index,
hf_transform_to_torch,
)
diff --git a/tests/test_utils.py b/tests/test_utils.py
index e5ba22673..8880d28c3 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -7,10 +7,9 @@
import torch
from datasets import Dataset
+from lerobot.common.datasets.push_dataset_to_hub.utils import calculate_episode_data_index
from lerobot.common.datasets.utils import (
- calculate_episode_data_index,
hf_transform_to_torch,
- reset_episode_index,
)
from lerobot.common.utils.utils import (
get_global_random_state,
@@ -73,20 +72,6 @@ def test_calculate_episode_data_index():
assert torch.equal(episode_data_index["to"], torch.tensor([2, 3, 6]))
-def test_reset_episode_index():
- dataset = Dataset.from_dict(
- {
- "timestamp": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6],
- "index": [0, 1, 2, 3, 4, 5],
- "episode_index": [10, 10, 11, 12, 12, 12],
- },
- )
- dataset.set_transform(hf_transform_to_torch)
- correct_episode_index = [0, 0, 1, 2, 2, 2]
- dataset = reset_episode_index(dataset)
- assert dataset["episode_index"] == correct_episode_index
-
-
def test_init_hydra_config_empty():
test_file = f"/tmp/test_init_hydra_config_empty_{uuid4().hex}.yaml"
with open(test_file, "w") as f:
diff --git a/tests/test_visualize_dataset.py b/tests/test_visualize_dataset.py
index 075e2b372..303342e3c 100644
--- a/tests/test_visualize_dataset.py
+++ b/tests/test_visualize_dataset.py
@@ -13,25 +13,21 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
-from pathlib import Path
-
import pytest
from lerobot.scripts.visualize_dataset import visualize_dataset
-@pytest.mark.parametrize(
- "repo_id",
- ["lerobot/pusht"],
-)
-@pytest.mark.parametrize("root", [Path(__file__).parent / "data"])
-def test_visualize_local_dataset(tmpdir, repo_id, root):
+@pytest.mark.skip("TODO: add dummy videos")
+def test_visualize_local_dataset(tmp_path, lerobot_dataset_factory):
+ root = tmp_path / "dataset"
+ output_dir = tmp_path / "outputs"
+ dataset = lerobot_dataset_factory(root=root)
rrd_path = visualize_dataset(
- repo_id,
+ dataset,
episode_index=0,
batch_size=32,
save=True,
- output_dir=tmpdir,
- root=root,
+ output_dir=output_dir,
)
assert rrd_path.exists()
diff --git a/tests/test_visualize_dataset_html.py b/tests/test_visualize_dataset_html.py
index 4dc3c0631..53924f567 100644
--- a/tests/test_visualize_dataset_html.py
+++ b/tests/test_visualize_dataset_html.py
@@ -14,23 +14,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-from pathlib import Path
-
-import pytest
-
from lerobot.scripts.visualize_dataset_html import visualize_dataset_html
-@pytest.mark.parametrize(
- "repo_id",
- ["lerobot/pusht"],
-)
-def test_visualize_dataset_html(tmpdir, repo_id):
- tmpdir = Path(tmpdir)
+def test_visualize_dataset_html(tmp_path, lerobot_dataset_factory):
+ root = tmp_path / "dataset"
+ output_dir = tmp_path / "outputs"
+ dataset = lerobot_dataset_factory(root=root)
visualize_dataset_html(
- repo_id,
+ dataset,
episodes=[0],
- output_dir=tmpdir,
+ output_dir=output_dir,
serve=False,
)
- assert (tmpdir / "static" / "episode_0.csv").exists()
+ assert (output_dir / "static" / "episode_0.csv").exists()
diff --git a/tests/utils.py b/tests/utils.py
index db214aeac..f24b3551c 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -13,13 +13,23 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+import json
+import os
import platform
+from copy import copy
from functools import wraps
+from pathlib import Path
import pytest
import torch
+from lerobot import available_cameras, available_motors, available_robots
+from lerobot.common.robot_devices.cameras.utils import Camera
+from lerobot.common.robot_devices.motors.utils import MotorsBus
+from lerobot.common.robot_devices.robots.factory import make_robot as make_robot_from_cfg
+from lerobot.common.robot_devices.robots.utils import Robot
from lerobot.common.utils.import_utils import is_package_available
+from lerobot.common.utils.utils import init_hydra_config
DEVICE = "cuda" if torch.cuda.is_available() else "cpu"
@@ -28,6 +38,42 @@
ROBOT_CONFIG_PATH_TEMPLATE = "lerobot/configs/robot/{robot}.yaml"
+TEST_ROBOT_TYPES = []
+for robot_type in available_robots:
+ TEST_ROBOT_TYPES += [(robot_type, True), (robot_type, False)]
+
+TEST_CAMERA_TYPES = []
+for camera_type in available_cameras:
+ TEST_CAMERA_TYPES += [(camera_type, True), (camera_type, False)]
+
+TEST_MOTOR_TYPES = []
+for motor_type in available_motors:
+ TEST_MOTOR_TYPES += [(motor_type, True), (motor_type, False)]
+
+# Camera indices used for connecting physical cameras
+OPENCV_CAMERA_INDEX = int(os.environ.get("LEROBOT_TEST_OPENCV_CAMERA_INDEX", 0))
+INTELREALSENSE_CAMERA_INDEX = int(os.environ.get("LEROBOT_TEST_INTELREALSENSE_CAMERA_INDEX", 128422271614))
+
+DYNAMIXEL_PORT = os.environ.get("LEROBOT_TEST_DYNAMIXEL_PORT", "/dev/tty.usbmodem575E0032081")
+DYNAMIXEL_MOTORS = {
+ "shoulder_pan": [1, "xl430-w250"],
+ "shoulder_lift": [2, "xl430-w250"],
+ "elbow_flex": [3, "xl330-m288"],
+ "wrist_flex": [4, "xl330-m288"],
+ "wrist_roll": [5, "xl330-m288"],
+ "gripper": [6, "xl330-m288"],
+}
+
+FEETECH_PORT = os.environ.get("LEROBOT_TEST_FEETECH_PORT", "/dev/tty.usbmodem585A0080971")
+FEETECH_MOTORS = {
+ "shoulder_pan": [1, "sts3215"],
+ "shoulder_lift": [2, "sts3215"],
+ "elbow_flex": [3, "sts3215"],
+ "wrist_flex": [4, "sts3215"],
+ "wrist_roll": [5, "sts3215"],
+ "gripper": [6, "sts3215"],
+}
+
def require_x86_64_kernel(func):
"""
@@ -173,13 +219,169 @@ def wrapper(*args, **kwargs):
# Access the pytest request context to get the is_robot_available fixture
request = kwargs.get("request")
robot_type = kwargs.get("robot_type")
+ mock = kwargs.get("mock")
+ if robot_type is None:
+ raise ValueError("The 'robot_type' must be an argument of the test function.")
if request is None:
- raise ValueError("The 'request' fixture must be passed to the test function as a parameter.")
+ raise ValueError("The 'request' fixture must be an argument of the test function.")
+ if mock is None:
+ raise ValueError("The 'mock' variable must be an argument of the test function.")
- # The function `is_robot_available` is defined in `tests/conftest.py`
- if not request.getfixturevalue("is_robot_available"):
+ # Run test with a real robot. Skip test if robot connection fails.
+ if not mock and not request.getfixturevalue("is_robot_available"):
pytest.skip(f"A {robot_type} robot is not available.")
+
+ return func(*args, **kwargs)
+
+ return wrapper
+
+
+def require_camera(func):
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ # Access the pytest request context to get the is_camera_available fixture
+ request = kwargs.get("request")
+ camera_type = kwargs.get("camera_type")
+ mock = kwargs.get("mock")
+
+ if request is None:
+ raise ValueError("The 'request' fixture must be an argument of the test function.")
+ if camera_type is None:
+ raise ValueError("The 'camera_type' must be an argument of the test function.")
+ if mock is None:
+ raise ValueError("The 'mock' variable must be an argument of the test function.")
+
+ if not mock and not request.getfixturevalue("is_camera_available"):
+ pytest.skip(f"A {camera_type} camera is not available.")
+
return func(*args, **kwargs)
return wrapper
+
+
+def require_motor(func):
+ @wraps(func)
+ def wrapper(*args, **kwargs):
+ # Access the pytest request context to get the is_motor_available fixture
+ request = kwargs.get("request")
+ motor_type = kwargs.get("motor_type")
+ mock = kwargs.get("mock")
+
+ if request is None:
+ raise ValueError("The 'request' fixture must be an argument of the test function.")
+ if motor_type is None:
+ raise ValueError("The 'motor_type' must be an argument of the test function.")
+ if mock is None:
+ raise ValueError("The 'mock' variable must be an argument of the test function.")
+
+ if not mock and not request.getfixturevalue("is_motor_available"):
+ pytest.skip(f"A {motor_type} motor is not available.")
+
+ return func(*args, **kwargs)
+
+ return wrapper
+
+
+def mock_calibration_dir(calibration_dir):
+ # TODO(rcadene): remove this hack
+ # calibration file produced with Moss v1, but works with Koch, Koch bimanual and SO-100
+ example_calib = {
+ "homing_offset": [-1416, -845, 2130, 2872, 1950, -2211],
+ "drive_mode": [0, 0, 1, 1, 1, 0],
+ "start_pos": [1442, 843, 2166, 2849, 1988, 1835],
+ "end_pos": [2440, 1869, -1106, -1848, -926, 3235],
+ "calib_mode": ["DEGREE", "DEGREE", "DEGREE", "DEGREE", "DEGREE", "LINEAR"],
+ "motor_names": ["shoulder_pan", "shoulder_lift", "elbow_flex", "wrist_flex", "wrist_roll", "gripper"],
+ }
+ Path(str(calibration_dir)).mkdir(parents=True, exist_ok=True)
+ with open(calibration_dir / "main_follower.json", "w") as f:
+ json.dump(example_calib, f)
+ with open(calibration_dir / "main_leader.json", "w") as f:
+ json.dump(example_calib, f)
+ with open(calibration_dir / "left_follower.json", "w") as f:
+ json.dump(example_calib, f)
+ with open(calibration_dir / "left_leader.json", "w") as f:
+ json.dump(example_calib, f)
+ with open(calibration_dir / "right_follower.json", "w") as f:
+ json.dump(example_calib, f)
+ with open(calibration_dir / "right_leader.json", "w") as f:
+ json.dump(example_calib, f)
+
+
+def make_robot(robot_type: str, overrides: list[str] | None = None, mock=False) -> Robot:
+ if mock:
+ overrides = [] if overrides is None else copy(overrides)
+
+ # Explicitely add mock argument to the cameras and set it to true
+ # TODO(rcadene, aliberts): redesign when we drop hydra
+ if robot_type in ["koch", "so100", "moss"]:
+ overrides.append("+leader_arms.main.mock=true")
+ overrides.append("+follower_arms.main.mock=true")
+ if "~cameras" not in overrides:
+ overrides.append("+cameras.laptop.mock=true")
+ overrides.append("+cameras.phone.mock=true")
+
+ elif robot_type == "koch_bimanual":
+ overrides.append("+leader_arms.left.mock=true")
+ overrides.append("+leader_arms.right.mock=true")
+ overrides.append("+follower_arms.left.mock=true")
+ overrides.append("+follower_arms.right.mock=true")
+ if "~cameras" not in overrides:
+ overrides.append("+cameras.laptop.mock=true")
+ overrides.append("+cameras.phone.mock=true")
+
+ elif robot_type == "aloha":
+ overrides.append("+leader_arms.left.mock=true")
+ overrides.append("+leader_arms.right.mock=true")
+ overrides.append("+follower_arms.left.mock=true")
+ overrides.append("+follower_arms.right.mock=true")
+ if "~cameras" not in overrides:
+ overrides.append("+cameras.cam_high.mock=true")
+ overrides.append("+cameras.cam_low.mock=true")
+ overrides.append("+cameras.cam_left_wrist.mock=true")
+ overrides.append("+cameras.cam_right_wrist.mock=true")
+
+ else:
+ raise NotImplementedError(robot_type)
+
+ config_path = ROBOT_CONFIG_PATH_TEMPLATE.format(robot=robot_type)
+ robot_cfg = init_hydra_config(config_path, overrides)
+ robot = make_robot_from_cfg(robot_cfg)
+ return robot
+
+
+def make_camera(camera_type, **kwargs) -> Camera:
+ if camera_type == "opencv":
+ from lerobot.common.robot_devices.cameras.opencv import OpenCVCamera
+
+ camera_index = kwargs.pop("camera_index", OPENCV_CAMERA_INDEX)
+ return OpenCVCamera(camera_index, **kwargs)
+
+ elif camera_type == "intelrealsense":
+ from lerobot.common.robot_devices.cameras.intelrealsense import IntelRealSenseCamera
+
+ camera_index = kwargs.pop("camera_index", INTELREALSENSE_CAMERA_INDEX)
+ return IntelRealSenseCamera(camera_index, **kwargs)
+
+ else:
+ raise ValueError(f"The camera type '{camera_type}' is not valid.")
+
+
+def make_motors_bus(motor_type: str, **kwargs) -> MotorsBus:
+ if motor_type == "dynamixel":
+ from lerobot.common.robot_devices.motors.dynamixel import DynamixelMotorsBus
+
+ port = kwargs.pop("port", DYNAMIXEL_PORT)
+ motors = kwargs.pop("motors", DYNAMIXEL_MOTORS)
+ return DynamixelMotorsBus(port, motors, **kwargs)
+
+ elif motor_type == "feetech":
+ from lerobot.common.robot_devices.motors.feetech import FeetechMotorsBus
+
+ port = kwargs.pop("port", FEETECH_PORT)
+ motors = kwargs.pop("motors", FEETECH_MOTORS)
+ return FeetechMotorsBus(port, motors, **kwargs)
+
+ else:
+ raise ValueError(f"The motor type '{motor_type}' is not valid.")