diff --git a/.github/workflows/build-and-push.yml b/.github/workflows/build-and-push.yml
index fab2af2b..d5e710eb 100644
--- a/.github/workflows/build-and-push.yml
+++ b/.github/workflows/build-and-push.yml
@@ -18,7 +18,10 @@ jobs:
contents: read
deployments: write
id-token: write
- runs-on: ubuntu-latest
+ runs-on: ubuntu-24.04
+ env:
+ # Disable docker compose volume mounts in docker-compose.override.yml
+ COMPOSE_FILE: docker-compose.yml
steps:
- uses: actions/checkout@v4
- name: Get info
@@ -36,22 +39,22 @@ jobs:
"$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID" > version.json
- name: Output version.json
run: cat version.json
+ - name: Install just
+ run: sudo apt-get update && sudo apt-get install -y just
- name: Build Docker images
- run: make build
+ run: just build
- name: Verify requirements.txt contains correct dependencies
run: |
- docker compose run --rm --no-deps test-ci bash ./bin/run_verify_reqs.sh
+ just verify-reqs
- name: Run lint check
run: |
- make .env
- docker compose run --rm --no-deps test-ci bash ./bin/run_lint.sh
+ just lint
- name: Run Eliot tests
run: |
- docker compose up -d fakesentry statsd
- docker compose run --rm test-ci bash ./bin/run_test.sh eliot
+ just test
- name: Build docs
run: |
- docker compose run --rm --no-deps test-ci bash make -C docs/ html
+ just docs
- name: Set Docker image tag to "latest" for updates of the main branch
if: github.ref == 'refs/heads/main'
diff --git a/Makefile b/Makefile
deleted file mode 100644
index b8ea13eb..00000000
--- a/Makefile
+++ /dev/null
@@ -1,101 +0,0 @@
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-# Include my.env and export it so variables set in there are available
-# in the Makefile.
-include .env
-export
-
-DOCKER := $(shell which docker)
-DC=${DOCKER} compose
-
-.DEFAULT_GOAL := help
-.PHONY: help
-help:
- @echo "Usage: make RULE"
- @echo ""
- @grep -E '^[a-zA-Z0-9_-]+:.*?## .*$$' Makefile \
- | grep -v grep \
- | sed -n 's/^\(.*\): \(.*\)##\(.*\)/\1\3/p' \
- | column -t -s '|'
- @echo ""
- @echo "Adjust your .env file to set configuration."
- @echo ""
- @echo "See https://mozilla-eliot.readthedocs.io/ for more documentation."
-
-# Dev configuration steps
-.docker-build:
- make build
-
-.devcontainer-build:
- make devcontainerbuild
-
-.env:
- ./bin/cp-env-file.sh
-
-.PHONY: build
-build: .env ## | Build docker images.
- ${DC} build --progress plain base
- ${DC} build --progress plain fakesentry statsd
- touch .docker-build
-
-.PHONY: run
-run: .env .docker-build ## | Run eliot and services.
- ${DC} up \
- --attach eliot \
- --attach fakesentry \
- eliot fakesentry
-
-.PHONY: devcontainerbuild
-devcontainerbuild: .env .docker-build .devcontainer-build ## | Build VS Code development container.
- ${DC} build --progress plain devcontainer
- touch .devcontainer-build
-
-.PHONY: devcontainer
-devcontainer: .env .docker-build ## | Run VS Code development container.
- ${DC} up --detach devcontainer
-
-.PHONY: stop
-stop: .env ## | Stop docker containers.
- ${DC} stop
-
-.PHONY: shell
-shell: .env .docker-build ## | Open a shell in eliot service container.
- ${DC} run --rm eliot bash
-
-.PHONY: clean
-clean: .env stop ## | Stop and remove docker containers and artifacts.
- ${DC} rm -f
- rm -fr .docker-build
-
-.PHONY: test
-test: .env .docker-build ## | Run Python unit test suite.
- ${DC} up -d fakesentry statsd
- ${DC} run --rm test bash ./bin/run_test.sh
-
-.PHONY: testshell
-testshell: .env .docker-build ## | Open shell in test environment.
- ${DC} up -d fakesentry statsd
- ${DC} run --rm test bash ./bin/run_test.sh --shell
-
-.PHONY: docs
-docs: .env .docker-build ## | Build docs.
- ${DC} run --rm --no-deps eliot bash make -C docs/ clean
- ${DC} run --rm --no-deps eliot bash make -C docs/ html
-
-.PHONY: lint
-lint: .env .docker-build ## | Lint code.
- ${DC} run --rm --no-deps test bash ./bin/run_lint.sh
-
-.PHONY: lintfix
-lintfix: .env .docker-build ## | Reformat code.
- ${DC} run --rm --no-deps test bash ./bin/run_lint.sh --fix
-
-.PHONY: rebuildreqs
-rebuildreqs: .env .docker-build ## | Rebuild requirements.txt file after requirements.in changes.
- ${DC} run --rm --no-deps eliot bash pip-compile --generate-hashes --strip-extras
-
-.PHONY: updatereqs
-updatereqs: .env .docker-build ## | Update deps in requirements.txt file.
- ${DC} run --rm --no-deps eliot bash pip-compile --generate-hashes --strip-extras -U
diff --git a/bin/license-check.py b/bin/license-check.py
deleted file mode 100755
index 05b75528..00000000
--- a/bin/license-check.py
+++ /dev/null
@@ -1,152 +0,0 @@
-#!/usr/bin/env python
-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at https://mozilla.org/MPL/2.0/.
-
-"""
-This script checks files for license headers.
-
-This requires Python 3.8+ to run.
-
-See https://github.com/willkg/socorro-release/#readme for details.
-
-repo: https://github.com/willkg/socorro-release/
-sha: d19f45bc9eedae34de2905cdd4adf7b9fd03f870
-
-"""
-
-import argparse
-import pathlib
-import subprocess
-import sys
-
-
-DESCRIPTION = (
- "Checks files in specified directory for license headers. "
- + "If you don't specify a target, it'll check all files in \"git ls-files\"."
-)
-
-# From https://www.mozilla.org/en-US/MPL/2.0/
-MPLV2 = [
- "This Source Code Form is subject to the terms of the Mozilla Public",
- "License, v. 2.0. If a copy of the MPL was not distributed with this",
- "file, You can obtain one at https://mozilla.org/MPL/2.0/.",
-]
-
-
-LANGUAGE_DATA = {".py": {"comment": ("#",)}}
-
-
-def is_code_file(path: pathlib.Path):
- """Determines whether the file is a code file we need to check.
-
- :param path: the Path for the file
-
- :returns: True if it's a code file to check, False otherwise.
-
- """
- if not path.is_file():
- return False
- ending: pathlib.Path = path.suffix
- return ending in LANGUAGE_DATA
-
-
-def has_license_header(path: pathlib.Path):
- """Determines if file at path has an MPLv2 license header.
-
- :param path: the Path for the file
-
- :returns: True if it does, False if it doesn't.
-
- """
- ending: pathlib.Path = path.suffix
- comment_indicators = LANGUAGE_DATA[ending]["comment"]
-
- header = []
- with open(path, "r") as fp:
- firstline = True
- for line in fp.readlines():
- if firstline and line.startswith("#!"):
- firstline = False
- continue
-
- line = line.strip()
- # NOTE(willkg): this doesn't handle multiline comments like in C++
- for indicator in comment_indicators:
- line = line.strip(indicator)
- line = line.strip()
-
- # Skip blank lines
- if not line:
- continue
-
- header.append(line)
- if len(header) == len(MPLV2):
- if header[: len(MPLV2)] == MPLV2:
- return True
- else:
- break
-
- return False
-
-
-def main(args):
- parser = argparse.ArgumentParser(description=DESCRIPTION)
- parser.add_argument(
- "-l", "--file-only", action="store_true", help="print files only"
- )
- parser.add_argument("--verbose", action="store_true", help="verbose output")
- parser.add_argument("target", help="file or directory tree to check", nargs="?")
-
- parsed = parser.parse_args(args)
-
- if parsed.target:
- target = pathlib.Path(parsed.target)
- if not target.exists():
- if not parsed.file_only:
- print(f"Not a valid file or directory: {target}")
- return 1
-
- if target.is_file():
- targets = [target]
-
- elif target.is_dir():
- targets = list(target.rglob("*"))
-
- else:
- ret = subprocess.check_output(["git", "ls-files"])
- targets = [
- pathlib.Path(target.strip()) for target in ret.decode("utf-8").splitlines()
- ]
-
- missing_headers = 0
-
- # Iterate through all the files in this target directory
- for path in targets:
- if parsed.verbose:
- print(f"Checking {path}")
- if is_code_file(path) and not has_license_header(path):
- missing_headers += 1
- if parsed.file_only:
- print(str(path))
- else:
- print(f"File {path} does not have license header.")
-
- if missing_headers > 0:
- if not parsed.file_only:
- print(f"Files with missing headers: {missing_headers}")
- print("")
- print("Add this:")
- print("")
- print("\n".join(MPLV2))
- return 1
-
- if not parsed.file_only:
- print("No files missing headers.")
-
- return 0
-
-
-if __name__ == "__main__":
- sys.exit(main(sys.argv[1:]))
diff --git a/bin/release.py b/bin/release.py
deleted file mode 100755
index 6912035d..00000000
--- a/bin/release.py
+++ /dev/null
@@ -1,483 +0,0 @@
-#!/usr/bin/env python
-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at https://mozilla.org/MPL/2.0/.
-
-"""
-This script handles releases for this project.
-
-This has two subcommands: ``make-bug`` and ``make-tag``. See the help text for
-both.
-
-This requires Python 3.8+ to run.
-
-Note: If you want to use ``pyproject.toml`` and you're using Python <3.11, this
-also requires the tomli library.
-
-See https://github.com/willkg/socorro-release/#readme for details.
-
-repo: https://github.com/willkg/socorro-release/
-sha: d19f45bc9eedae34de2905cdd4adf7b9fd03f870
-
-"""
-
-import argparse
-import configparser
-import datetime
-import json
-import os
-import re
-import shlex
-import subprocess
-import sys
-from urllib.parse import urlencode
-from urllib.request import urlopen
-
-
-DESCRIPTION = """
-release.py makes it easier to create deploy bugs and push tags to trigger
-deploys.
-
-For help, see: https://github.com/willkg/socorro-release/
-"""
-
-GITHUB_API = "https://api.github.com/"
-BZ_CREATE_URL = "https://bugzilla.mozilla.org/enter_bug.cgi"
-BZ_BUG_JSON_URL = "https://bugzilla.mozilla.org/rest/bug/"
-
-DEFAULT_CONFIG = {
- # Bugzilla product and component to write new bugs in
- "bugzilla_product": "",
- "bugzilla_component": "",
- # GitHub user and project name
- "github_user": "",
- "github_project": "",
- # The name of the main branch
- "main_branch": "",
- # The tag structure using datetime formatting markers
- "tag_name_template": "%Y.%m.%d",
-}
-
-LINE = "=" * 80
-
-# Recognize "bug-NNNNNNN", "bug NNNNNNN", and multi-bug variants
-BUG_RE = re.compile(r"\bbug(?:s?:?\s*|-)([\d\s,\+and]+)\b", re.IGNORECASE)
-
-# Recognize "bug-NNNNNNN"
-BUG_HYPHEN_PREFIX_RE = re.compile(r"bug-([\d]+)", re.IGNORECASE)
-
-
-def get_config():
- """Generates configuration.
-
- This tries to pull configuration from:
-
- 1. the ``[tool.release]`` table from a ``pyproject.toml`` file, OR
- 2. the ``[tool:release]`` section of a ``setup.cfg`` file
-
- If neither exist, then it uses defaults.
-
- :returns: configuration dict
-
- """
- my_config = dict(DEFAULT_CONFIG)
-
- if os.path.exists("pyproject.toml"):
- if sys.version_info >= (3, 11):
- import tomllib
- else:
- try:
- import tomli as tomllib
- except ImportError:
- print(
- "For Python <3.11, you need to install tomli to work with pyproject.toml "
- + "files."
- )
- tomllib = None
-
- if tomllib is not None:
- with open("pyproject.toml", "rb") as fp:
- data = tomllib.load(fp)
-
- config_data = data.get("tool", {}).get("release", {})
- if config_data:
- for key, default_val in my_config.items():
- my_config[key] = config_data.get(key, default_val)
- return my_config
-
- if os.path.exists("setup.cfg"):
- config = configparser.ConfigParser()
- config.read("setup.cfg")
-
- if "tool:release" in config:
- config = config["tool:release"]
- for key, default_val in my_config.items():
- my_config[key] = config.get(key, default_val)
-
- return my_config
-
- return my_config
-
-
-def find_bugs(line):
- """Returns all the bug numbers from the line.
-
- >>> get_bug_numbers("some line")
- []
- >>> get_bug_numbers("bug-1111111: some line")
- ["1111111"]
- >>> get_bug_numbers("bug 1111111, 2222222: some line")
- ["1111111", "2222222"]
-
- """
- matches = BUG_RE.findall(line)
- if not matches:
- return []
- bugs = []
- for match in matches:
- for part in re.findall(r"\d+", match):
- if part:
- bugs.append(part)
- return bugs
-
-
-def fetch(url, is_json=True):
- """Fetch data from a url
-
- This raises URLError on HTTP request errors. It also raises JSONDecode
- errors if it's not valid JSON.
-
- """
- fp = urlopen(url)
- data = fp.read()
- if is_json:
- return json.loads(data)
- return data
-
-
-def fetch_history_from_github(owner, repo, from_rev, main_branch):
- url = f"{GITHUB_API}repos/{owner}/{repo}/compare/{from_rev}...{main_branch}"
- return fetch(url)
-
-
-def check_output(cmdline, **kwargs):
- args = shlex.split(cmdline)
- return subprocess.check_output(args, **kwargs).decode("utf-8").strip()
-
-
-def get_remote_name(github_user):
- """Figures out the right remote to use
-
- People name the git remote differently, so this figures out which one to
- use.
-
- :arg str github_user: the github user for the remote name to use
-
- :returns: the name of the remote
-
- :raises Exception: if it can't figure out the remote name for the specified
- user
-
- """
- # Figure out remote to push tag to
- remote_output = check_output("git remote -v")
-
- def check_ssh(github_user, remote_url):
- return f":{github_user}/" in remote_url
-
- def check_https(github_user, remote_url):
- return f"/{github_user}/" in remote_url
-
- for line in remote_output.splitlines():
- line = line.split("\t")
- if check_ssh(github_user, line[1]) or check_https(github_user, line[1]):
- return line[0]
-
- raise Exception(f"Can't figure out remote name for {github_user}.")
-
-
-def make_tag(
- bug_number,
- github_project,
- github_user,
- remote_name,
- tag_name,
- commits_since_tag,
-):
- """Tags a release."""
- if bug_number:
- resp = fetch(BZ_BUG_JSON_URL + bug_number, is_json=True)
- bug_summary = resp["bugs"][0]["summary"]
-
- input(f">>> Using bug {bug_number}: {bug_summary}. Correct? Ctrl-c to cancel")
-
- message = (
- f"Tag {tag_name} (bug #{bug_number})\n\n"
- + "\n".join(commits_since_tag)
- + f"\n\nDeploy bug #{bug_number}"
- )
- else:
- message = f"Tag {tag_name}\n\n" + "\n".join(commits_since_tag)
-
- # Print out new tag information
- print("")
- print(">>> New tag: %s" % tag_name)
- print(">>> Tag message:")
- print(LINE)
- print(message)
- print(LINE)
-
- # Create tag
- input(f">>> Ready to tag {tag_name}? Ctrl-c to cancel")
- print("")
- print(">>> Creating tag...")
- subprocess.check_call(["git", "tag", "-s", tag_name, "-m", message])
-
- # Push tag
- input(f">>> Ready to push to remote {remote_name}? Ctrl-c to cancel")
- print("")
- print(">>> Pushing...")
- subprocess.check_call(["git", "push", "--tags", remote_name, tag_name])
-
- if bug_number:
- # Show url to tag information on GitHub for bug comment
- tag_url = (
- f"https://github.com/{github_user}/{github_project}/releases/tag/{tag_name}"
- )
- print("")
- print(f">>> Copy and paste this tag url into bug #{bug_number}.")
- print(">>> %<-----------------------------------------------")
- print(f"{tag_url}")
- print(">>> %<-----------------------------------------------")
-
-
-def make_bug(
- github_project,
- tag_name,
- commits_since_tag,
- bugs_referenced,
- bugzilla_product,
- bugzilla_component,
-):
- """Creates a bug."""
- summary = f"{github_project} deploy: {tag_name}"
- print(">>> Creating deploy bug...")
- print(">>> Summary")
- print(summary)
- print()
-
- description = [
- f"We want to do a deploy for `{github_project}` tagged `{tag_name}`.",
- "",
- "It consists of the following commits:",
- "",
- ]
- description.extend(commits_since_tag)
- if bugs_referenced:
- description.append("")
- description.append("Bugs referenced:")
- description.append("")
- for bug in sorted(bugs_referenced):
- description.append(f"* bug #{bug}")
- description = "\n".join(description)
-
- print(">>> Description")
- print(description)
- print()
-
- if bugzilla_product:
- bz_params = {
- "priority": "P2",
- "bug_type": "task",
- "comment": description,
- "form_name": "enter_bug",
- "short_desc": summary,
- }
-
- bz_params["product"] = bugzilla_product
- if bugzilla_component:
- bz_params["component"] = bugzilla_component
-
- bugzilla_link = BZ_CREATE_URL + "?" + urlencode(bz_params)
- print(">>> Link to create bug (may not work if it's sufficiently long)")
- print(bugzilla_link)
-
-
-def run():
- config = get_config()
-
- parser = argparse.ArgumentParser(description=DESCRIPTION)
-
- # Add items that can be configured to argparse as configuration options.
- # This makes it possible to specify or override configuration with command
- # line arguments.
- for key, val in config.items():
- key_arg = key.replace("_", "-")
- default_val = val.replace("%", "%%")
- parser.add_argument(
- f"--{key_arg}",
- default=val,
- help=f"override configuration {key}; defaults to {default_val!r}",
- )
-
- subparsers = parser.add_subparsers(dest="cmd")
- subparsers.required = True
-
- subparsers.add_parser("make-bug", help="Make a deploy bug")
- make_tag_parser = subparsers.add_parser("make-tag", help="Make a tag and push it")
- make_tag_parser.add_argument(
- "--with-bug", dest="bug", help="Bug for this deploy if any."
- )
- make_tag_parser.add_argument(
- "--with-tag",
- dest="tag",
- help="Tag to use; defaults to figuring out the tag using tag_name_template.",
- )
-
- args = parser.parse_args()
-
- github_project = args.github_project
- github_user = args.github_user
- main_branch = args.main_branch
- tag_name_template = args.tag_name_template
-
- if not github_project or not github_user or not main_branch:
- print("main_branch, github_project, and github_user are required.")
- print(
- "Either set them in pyproject.toml/setup.cfg or specify them as command "
- + "line arguments."
- )
- return 1
-
- # Let's make sure we're up-to-date and on main branch
- current_branch = check_output("git rev-parse --abbrev-ref HEAD")
- if current_branch != main_branch:
- print(
- f"Must be on the {main_branch} branch to do this; currently on {current_branch}"
- )
- return 1
-
- # The current branch can't be dirty
- try:
- subprocess.check_call("git diff --quiet --ignore-submodules HEAD".split())
- except subprocess.CalledProcessError:
- print(
- "Can't be \"git dirty\" when we're about to git pull. "
- "Stash or commit what you're working on."
- )
- return 1
-
- remote_name = get_remote_name(github_user)
-
- # Get existing git tags from remote
- check_output(
- f"git pull {remote_name} {main_branch} --tags", stderr=subprocess.STDOUT
- )
-
- # Figure out the most recent tag details
- all_tags = check_output("git tag --list --sort=-creatordate").splitlines()
- if all_tags:
- last_tag = all_tags[0]
- last_tag_message = check_output(f'git tag -l --format="%(contents)" {last_tag}')
- print(f">>> Last tag was: {last_tag}")
- print(">>> Message:")
- print(LINE)
- print(last_tag_message)
- print(LINE)
-
- resp = fetch_history_from_github(
- github_user, github_project, last_tag, main_branch
- )
- if resp["status"] != "ahead":
- print(f"Nothing to deploy! {resp['status']}")
- return
- else:
- first_commit = check_output("git rev-list --max-parents=0 HEAD")
- resp = fetch_history_from_github(github_user, github_project, first_commit)
-
- bugs_referenced = set()
- commits_since_tag = []
- for commit in resp["commits"]:
- # Skip merge commits
- if len(commit["parents"]) > 1:
- continue
-
- # Use the first 7 characters of the commit sha
- sha = commit["sha"][:7]
-
- # Use the first line of the commit message which is the summary and
- # truncate it to 80 characters
- summary = commit["commit"]["message"]
- summary = summary.splitlines()[0]
- summary = summary[:80]
-
- # Bug 1868455: While GitHub autolinking doesn't suport spaces, Bugzilla
- # autolinking doesn't support hyphens. When creating a bug, we want to
- # use "bug NNNNNNN" form so Bugzilla autolinking works.
- if args.cmd == "make-bug":
- summary = BUG_HYPHEN_PREFIX_RE.sub(r"bug \1", summary)
-
- bugs = find_bugs(summary)
- if bugs:
- bugs_referenced |= set(bugs)
-
- # Figure out who did the commit prefering GitHub usernames
- who = commit["author"]
- if not who:
- who = "?"
- else:
- who = who.get("login", "?")
-
- commits_since_tag.append("`%s`: %s (%s)" % (sha, summary, who))
-
- # Use specified tag or figure out next tag name as YYYY.MM.DD format
- if args.cmd == "make-tag" and args.tag:
- tag_name = args.tag
- else:
- tag_name = datetime.datetime.now().strftime(tag_name_template)
-
- # If there's already a tag, then increment the -N until we find a tag name
- # that doesn't exist, yet
- existing_tags = check_output(f'git tag -l "{tag_name}*"').splitlines()
- if existing_tags:
- tag_name_attempt = tag_name
- index = 2
- while tag_name_attempt in existing_tags:
- tag_name_attempt = f"{tag_name}-{index}"
- index += 1
- tag_name = tag_name_attempt
-
- if args.cmd == "make-bug":
- make_bug(
- github_project,
- tag_name,
- commits_since_tag,
- bugs_referenced,
- args.bugzilla_product,
- args.bugzilla_component,
- )
-
- elif args.cmd == "make-tag":
- if args.bugzilla_product and args.bugzilla_component and not args.bug:
- print(
- "Bugzilla product and component are specified, but you didn't "
- + "specify a bug number with --with-bug."
- )
- return 1
- make_tag(
- args.bug,
- github_project,
- github_user,
- remote_name,
- tag_name,
- commits_since_tag,
- )
-
- else:
- parser.print_help()
- return 1
-
-
-if __name__ == "__main__":
- sys.exit(run())
diff --git a/bin/run_lint.sh b/bin/run_lint.sh
index e5666b42..a376a1c6 100755
--- a/bin/run_lint.sh
+++ b/bin/run_lint.sh
@@ -2,7 +2,7 @@
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# Usage: bin/run_lint.sh [--fix]
#
@@ -10,30 +10,37 @@
#
# This should be called from inside a container.
-set -e
+set -euo pipefail
FILES="bin eliot tests"
PYTHON_VERSION=$(python --version)
-if [[ $1 == "--fix" ]]; then
+
+if [[ "${1:-}" == "--help" ]]; then
+ echo "Usage: $0 [OPTIONS]"
+ echo
+ echo " Lint code"
+ echo
+ echo "Options:"
+ echo " --help Show this message and exit."
+ echo " --fix Reformat code."
+elif [[ "${1:-}" == "--fix" ]]; then
echo ">>> ruff fix (${PYTHON_VERSION})"
ruff format $FILES
ruff check --fix $FILES
-
else
echo ">>> ruff (${PYTHON_VERSION})"
- cd /app
ruff check $FILES
ruff format --check $FILES
echo ">>> license check (${PYTHON_VERSION})"
if [[ -d ".git" ]]; then
- # If the .git directory exists, we can let license-check.py do
+ # If the .git directory exists, we can let license-check do
# git ls-files.
- python bin/license-check.py
+ license-check
else
# The .git directory doesn't exist, so run it on all the Python
# files in the tree.
- python bin/license-check.py .
+ license-check .
fi
fi
diff --git a/bin/sentry-wrap.py b/bin/sentry-wrap.py
deleted file mode 100755
index 4fbdb47a..00000000
--- a/bin/sentry-wrap.py
+++ /dev/null
@@ -1,112 +0,0 @@
-#!/usr/bin/env python
-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at https://mozilla.org/MPL/2.0/.
-
-# Wraps a command such that if it fails, an error report is sent to the Sentry service
-# specified by ELIOT_SECRET_SENTRY_DSN in the environment.
-#
-# Usage: python bin/sentry-wrap.py wrap-process -- [CMD]
-# Wraps a process in error-reporting Sentry goodness.
-#
-# Usage: python bin/sentry-wrap.py test-sentry
-# Tests Sentry configuration and connection.
-
-
-import os
-import shlex
-import subprocess
-import sys
-import time
-import traceback
-
-import click
-import sentry_sdk
-from sentry_sdk import capture_exception, capture_message
-
-
-SENTRY_DSN_VAR = "ELIOT_SECRET_SENTRY_DSN"
-
-
-@click.group()
-def cli_main():
- pass
-
-
-@cli_main.command()
-@click.pass_context
-def test_sentry(ctx):
- sentry_dsn = os.environ.get(SENTRY_DSN_VAR)
-
- if not sentry_dsn:
- click.echo(f"{SENTRY_DSN_VAR} is not defined. Exiting.")
- sys.exit(1)
-
- sentry_sdk.init(sentry_dsn)
- capture_message("Sentry test")
- click.echo("Success. Check Sentry.")
-
-
-@cli_main.command()
-@click.option(
- "--timeout",
- default=300,
- help="Timeout in seconds to wait for process before giving up.",
-)
-@click.argument("cmd", nargs=-1)
-@click.pass_context
-def wrap_process(ctx, timeout, cmd):
- sentry_dsn = os.environ.get(SENTRY_DSN_VAR)
-
- if not sentry_dsn:
- click.echo(f"{SENTRY_DSN_VAR} is not defined. Exiting.")
- sys.exit(1)
-
- if not cmd:
- raise click.UsageError("CMD required")
-
- start_time = time.time()
-
- sentry_sdk.init(sentry_dsn)
-
- cmd = " ".join(cmd)
- cmd_args = shlex.split(cmd)
- click.echo(f"Running: {cmd_args}")
-
- try:
- ret = subprocess.run(cmd_args, capture_output=True, timeout=timeout)
- if ret.returncode != 0:
- sentry_sdk.set_context(
- "status",
- {
- "exit_code": ret.returncode,
- "stdout": ret.stdout.decode("utf-8"),
- "stderr": ret.stderr.decode("utf-8"),
- },
- )
- capture_message(f"Command {cmd!r} failed.")
- click.echo(ret.stdout.decode("utf-8"))
- click.echo(ret.stderr.decode("utf-8"))
- time_delta = (time.time() - start_time) / 1000
- click.echo(f"Fail. {time_delta:.2f}s")
- ctx.exit(1)
-
- else:
- click.echo(ret.stdout.decode("utf-8"))
- time_delta = (time.time() - start_time) / 1000
- click.echo(f"Success! {time_delta:.2f}s")
-
- except click.exceptions.Exit:
- raise
-
- except Exception as exc:
- capture_exception(exc)
- click.echo(traceback.format_exc())
- time_delta = (time.time() - start_time) / 1000
- click.echo(f"Fail. {time_delta:.2f}s")
- ctx.exit(1)
-
-
-if __name__ == "__main__":
- cli_main()
diff --git a/bin/service-status.py b/bin/service-status.py
deleted file mode 100755
index 7bd5a161..00000000
--- a/bin/service-status.py
+++ /dev/null
@@ -1,219 +0,0 @@
-#!/usr/bin/env python
-
-# This Source Code Form is subject to the terms of the Mozilla Public
-# License, v. 2.0. If a copy of the MPL was not distributed with this
-# file, You can obtain one at https://mozilla.org/MPL/2.0/.
-
-"""
-This script looks at the ``/__version__`` endpoint information and tells you
-how far behind different server environments are from main tip.
-
-This requires Python 3.8+ to run. See help text for more.
-
-See https://github.com/willkg/socorro-release/#readme for details.
-
-Note: If you want to use ``pyproject.toml`` and you're using Python <3.11, this
-also requires the tomli library.
-
-repo: https://github.com/willkg/socorro-release/
-sha: d19f45bc9eedae34de2905cdd4adf7b9fd03f870
-
-"""
-
-import argparse
-import json
-import os
-import sys
-from urllib.parse import urlparse
-from urllib.request import urlopen
-
-
-DESCRIPTION = """
-service-status.py tells you how far behind different server environments
-are from main tip.
-
-For help, see: https://github.com/willkg/socorro-release/
-"""
-
-DEFAULT_CONFIG = {
- # The name of the main branch in the repository
- "main_branch": "main",
- # List of "label=host" for hosts that have a /__version__ to check
- "hosts": [],
-}
-
-
-def get_config():
- """Generates configuration.
-
- This tries to pull configuration from the ``[tool.service-status]`` table
- from a ``pyproject.toml`` file.
-
- If neither exist, then it uses defaults.
-
- :returns: configuration dict
-
- """
- my_config = dict(DEFAULT_CONFIG)
-
- if os.path.exists("pyproject.toml"):
- if sys.version_info >= (3, 11):
- import tomllib
- else:
- try:
- import tomli as tomllib
- except ImportError:
- print(
- "For Python <3.11, you need to install tomli to work with pyproject.toml "
- + "files."
- )
- tomllib = None
-
- if tomllib is not None:
- with open("pyproject.toml", "rb") as fp:
- data = tomllib.load(fp)
-
- config_data = data.get("tool", {}).get("service-status", {})
- if config_data:
- for key, default_val in my_config.items():
- my_config[key] = config_data.get(key, default_val)
-
- return my_config
-
-
-def fetch(url, is_json=True):
- """Fetch data from a url
-
- This raises URLError on HTTP request errors. It also raises JSONDecode
- errors if it's not valid JSON.
-
- """
- fp = urlopen(url, timeout=5)
- data = fp.read()
- if is_json:
- return json.loads(data)
- return data
-
-
-def fetch_history_from_github(main_branch, user, repo, from_sha):
- return fetch(
- "https://api.github.com/repos/%s/%s/compare/%s...%s"
- % (user, repo, from_sha, main_branch)
- )
-
-
-class StdoutOutput:
- def section(self, name):
- print("")
- print("%s" % name)
- print("=" * len(name))
- print("")
-
- def row(self, *args):
- template = "%-13s " * len(args)
- print(" " + template % args)
-
- def print_delta(self, main_branch, user, repo, sha):
- resp = fetch_history_from_github(main_branch, user, repo, sha)
- # from pprint import pprint
- # pprint(resp)
- if resp["total_commits"] == 0:
- self.row("", "status", "identical")
- else:
- self.row("", "status", "%s commits" % resp["total_commits"])
- self.row()
- self.row(
- "",
- "https://github.com/%s/%s/compare/%s...%s"
- % (
- user,
- repo,
- sha[:8],
- main_branch,
- ),
- )
- self.row()
- for i, commit in enumerate(resp["commits"]):
- if len(commit["parents"]) > 1:
- # Skip merge commits
- continue
-
- self.row(
- "",
- commit["sha"][:8],
- ("HEAD: " if i == 0 else "")
- + "%s (%s)"
- % (
- commit["commit"]["message"].splitlines()[0][:60],
- (commit["author"] or {}).get("login", "?")[:10],
- ),
- )
- self.row()
-
-
-def main():
- config = get_config()
-
- parser = argparse.ArgumentParser(description=DESCRIPTION)
-
- # Add items that can be configured to argparse as configuration options.
- # This makes it possible to specify or override configuration with command
- # line arguments.
- for key, val in config.items():
- key_arg = key.replace("_", "-")
- if isinstance(val, list):
- parser.add_argument(
- f"--{key_arg}",
- default=val,
- nargs="+",
- metavar="VALUE",
- help=f"override configuration {key}; defaults to {val!r}",
- )
- else:
- default_val = val.replace("%", "%%")
- parser.add_argument(
- f"--{key_arg}",
- default=val,
- metavar="VALUE",
- help=f"override configuration {key}; defaults to {default_val!r}",
- )
-
- args = parser.parse_args()
-
- main_branch = args.main_branch
- hosts = args.hosts
-
- out = StdoutOutput()
-
- if not hosts:
- print("no hosts specified.")
- return 1
-
- current_section = ""
-
- for line in hosts:
- parts = line.split("=", 1)
- if len(parts) == 1:
- service = parts[0]
- env_name = "environment"
- else:
- env_name, service = parts
-
- if current_section != env_name:
- out.section(env_name)
- current_section = env_name
-
- service = service.rstrip("/")
- resp = fetch(f"{service}/__version__")
- commit = resp["commit"]
- tag = resp.get("version") or "(none)"
-
- parsed = urlparse(resp["source"])
- _, user, repo = parsed.path.split("/")
- service_name = repo
- out.row(service_name, "version", commit, tag)
- out.print_delta(main_branch, user, repo, commit)
-
-
-if __name__ == "__main__":
- sys.exit(main())
diff --git a/docker-compose.override.yml b/docker-compose.override.yml
new file mode 100644
index 00000000..cea91e01
--- /dev/null
+++ b/docker-compose.override.yml
@@ -0,0 +1,14 @@
+---
+# version: '2.4'
+services:
+ base:
+ volumes:
+ - .:/app
+
+ eliot:
+ volumes:
+ - $PWD:/app
+
+ test:
+ volumes:
+ - $PWD:/app
diff --git a/docker-compose.yml b/docker-compose.yml
index c132b352..ac5758ac 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,5 +1,4 @@
---
-version: '2.4'
services:
# Base container is used for development tasks like tests, linting,
# and building docs.
@@ -27,8 +26,6 @@ services:
links:
- fakesentry
- statsd
- volumes:
- - $PWD:/app
command: ["eliot"]
# Container specifically for running tests.
@@ -40,17 +37,7 @@ services:
- docker/config/test.env
links:
- fakesentry
- volumes:
- - $PWD:/app
-
- test-ci:
- extends:
- service: base
- env_file:
- - docker/config/local_dev.env
- - docker/config/test.env
- links:
- - fakesentry
+ - statsd
devcontainer:
extends:
@@ -63,6 +50,8 @@ services:
image: eliot-devcontainer
entrypoint: ["sleep", "inf"]
stop_signal: SIGKILL # Doesn't seem to respond to anything else
+ volumes:
+ - $PWD:/app
# https://github.com/willkg/kent
fakesentry:
diff --git a/docs/dev.rst b/docs/dev.rst
index 0edc896c..e172f144 100644
--- a/docs/dev.rst
+++ b/docs/dev.rst
@@ -9,7 +9,7 @@ Development
Setup quickstart
================
-1. Install required software: Docker, make, and git.
+1. Install required software: Docker, just, and git.
**Linux**:
@@ -19,17 +19,17 @@ Setup quickstart
Install `Docker for Mac `_.
- Use `homebrew `_ to install make and git:
+ Use `homebrew `_ to install just and git:
.. code-block:: shell
- $ brew install make git
+ $ brew install just git
**Other**:
Install `Docker `_.
- Install `make `_.
+ Install `just `_.
Install `git `_.
@@ -45,13 +45,13 @@ Setup quickstart
.. code-block:: shell
- $ make .env
+ $ just _env
- Then edit the file and set the ``APP_UID`` and ``APP_GID`` variables. These
+ Then edit the file and set the ``USE_UID`` and ``USE_GID`` variables. These
will get used when creating the app user in the base image.
If you ever want different values, change them in ``.env`` and re-run
- ``make build``.
+ ``just build``.
4. Build Docker images.
@@ -59,7 +59,7 @@ Setup quickstart
.. code-block:: shell
- $ make build
+ $ just build
That will build the app Docker image required for development.
@@ -70,7 +70,7 @@ To run Eliot, do:
.. code-block:: shell
- $ make run
+ $ just run
The webapp is at ``__.
@@ -204,14 +204,14 @@ To lint all the code, do:
.. code-block:: bash
- $ make lint
+ $ just lint
To reformat all the code, do:
.. code-block:: bash
- $ make lintfix
+ $ just lint --fix
HTML/CSS conventions
@@ -245,13 +245,13 @@ To add a new dependency, add it to the file and then do:
.. code-block:: shell
- $ make rebuildreqs
+ $ just rebuild-reqs
Then rebuild your docker environment:
.. code-block:: shell
- $ make build
+ $ just build
If there are problems, it'll tell you.
@@ -260,7 +260,7 @@ dependencies. To do this, run:
.. code-block:: shell
- $ make updatereqs
+ $ just rebuild-reqs --upgrade
Configuration
@@ -303,7 +303,7 @@ To build the docs, do:
.. code-block:: shell
- $ make docs
+ $ just docs
Then view ``docs/_build/html/index.html`` in your browser.
@@ -317,7 +317,7 @@ To run all the tests, do:
.. code-block:: shell
- $ make test
+ $ just test
Tests for the Symbolication Service webapp go in ``tests/``.
@@ -326,7 +326,7 @@ the testshell:
.. code-block:: shell
- $ make testshell
+ $ just test-shell
app@xxx:/app$ pytest
@@ -355,7 +355,7 @@ first run:
.. code-block:: shell
- $ make devcontainerbuild
+ $ just build devcontainer
Additionally on mac there is the potential that running git from inside any
container that mounts the current directory to `/app`, such as the development
@@ -375,7 +375,7 @@ by app:app, so that's fine:
How to change settings in your local dev environment
----------------------------------------------------
Edit the ``.env`` file and add/remove/change settings. These environment
-variables are used by make and automatically included by docker compose.
+variables are automatically included by docker compose and just.
If you are using a VS Code development container for other repositories such as
`tecken `_ or
@@ -394,7 +394,7 @@ the container to pick up changes:
.. code-block:: shell
- $ make devcontainer
+ $ just run -d devcontainer
How to upgrade the Python version
---------------------------------
diff --git a/justfile b/justfile
new file mode 100644
index 00000000..fe9fb919
--- /dev/null
+++ b/justfile
@@ -0,0 +1,66 @@
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at https://mozilla.org/MPL/2.0/.
+
+_default:
+ @just --list
+
+_env:
+ #!/usr/bin/env sh
+ if [ ! -f .env ]; then
+ echo "Copying docker/config/env-dist to .env..."
+ cp docker/config/env-dist .env
+ fi
+
+# Build docker images.
+build *args='base fakesentry statsd': _env
+ docker compose --progress plain build {{args}}
+
+# Run the webapp and services.
+run *args='--attach=eliot --attach=fakesentry eliot': _env
+ docker compose up {{args}}
+
+# Stop service containers.
+stop *args:
+ docker compose stop {{args}}
+
+# Remove service containers and networks.
+down *args:
+ docker compose down {{args}}
+
+# Open a shell in the web image.
+shell *args='/bin/bash': _env
+ docker compose run --rm --entrypoint= eliot {{args}}
+
+# Open a shell in the test container.
+test-shell *args='/bin/bash': _env
+ docker compose run --rm --entrypoint= test {{args}}
+
+# Stop and remove docker containers and artifacts.
+clean:
+ docker compose down
+
+# Run tests.
+test *args: _env
+ docker compose run --rm test bash ./bin/run_test.sh {{args}}
+
+# Generate Sphinx HTML documentation.
+docs: _env
+ docker compose run --rm --no-deps eliot bash make -C docs/ clean
+ docker compose run --rm --no-deps eliot bash make -C docs/ html
+
+# Lint code, or use --fix to reformat and apply auto-fixes for lint.
+lint *args: _env
+ docker compose run --rm --no-deps eliot bash ./bin/run_lint.sh {{args}}
+
+# Rebuild requirements.txt file after requirements.in changes.
+rebuild-reqs *args: _env
+ docker compose run --rm --no-deps eliot bash pip-compile --generate-hashes --strip-extras {{args}}
+
+# Verify that the requirements file is built by the version of Python that runs in the container.
+verify-reqs:
+ docker compose run --rm --no-deps eliot bash ./bin/run_verify_reqs.sh
+
+# Check how far behind different server environments are from main tip.
+service-status *args:
+ docker compose run --rm --no-deps eliot bash service-status {{args}}
diff --git a/requirements.in b/requirements.in
index eb8cbb3c..6379163b 100644
--- a/requirements.in
+++ b/requirements.in
@@ -25,3 +25,6 @@ sphinx-rtd-theme==3.0.2
symbolic==12.12.1
urllib3==2.2.3
werkzeug==3.1.3
+# Mozilla obs-team libraries that are published to GAR instead of pypi
+--extra-index-url https://us-python.pkg.dev/moz-fx-cavendish-prod/cavendish-prod-python/simple/
+obs-common==2024.11.7
diff --git a/requirements.txt b/requirements.txt
index df78517a..40761116 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,6 +4,8 @@
#
# pip-compile --generate-hashes --strip-extras
#
+--extra-index-url https://us-python.pkg.dev/moz-fx-cavendish-prod/cavendish-prod-python/simple/
+
alabaster==0.7.16 \
--hash=sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65 \
--hash=sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92
@@ -31,6 +33,10 @@ build==1.2.1 \
--hash=sha256:526263f4870c26f26c433545579475377b2b7588b6f1eac76a001e873ae3e19d \
--hash=sha256:75e10f767a433d9a86e50d83f418e83efc18ede923ee5ff7df93b6cb0306c5d4
# via pip-tools
+cachetools==5.5.0 \
+ --hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \
+ --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a
+ # via google-auth
certifi==2022.12.7 \
--hash=sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3 \
--hash=sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18
@@ -98,6 +104,7 @@ click==8.1.7 \
--hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de
# via
# -r requirements.in
+ # obs-common
# pip-tools
datadog==0.50.1 \
--hash=sha256:579d4db54bd6ef918c5250217edb15b80b7b11582b8e24fce43702768c3f2e2d \
@@ -105,6 +112,12 @@ datadog==0.50.1 \
# via
# -r requirements.in
# markus
+deprecated==1.2.15 \
+ --hash=sha256:353bc4a8ac4bfc96800ddab349d89c25dec1079f65fd53acdcc1e0b975b21320 \
+ --hash=sha256:683e561a90de76239796e6b6feac66b99030d2dd3fcf61ef996330f14bbb9b0d
+ # via
+ # opentelemetry-api
+ # opentelemetry-semantic-conventions
dockerflow==2024.4.2 \
--hash=sha256:b9f92455449ba46555f57db34cccefc4c49d3533c67793624ab7e80a1625caa7 \
--hash=sha256:f4216a3a809093860d7b2db84ba0a25c894cb8eb98b74f4f6a04badbc4f6b0a4
@@ -169,6 +182,147 @@ fillmore==2.1.0 \
--hash=sha256:251ed9154ba7f20f5825e4d757db0ad7b1642e72bda7657fe39fe39031cd2092 \
--hash=sha256:29873e6f7fae15b32ddd01eff7a8418f26ab33c731f3b99a6a07a4c4c8c3625f
# via -r requirements.in
+google-api-core==2.23.0 \
+ --hash=sha256:2ceb087315e6af43f256704b871d99326b1f12a9d6ce99beaedec99ba26a0ace \
+ --hash=sha256:c20100d4c4c41070cf365f1d8ddf5365915291b5eb11b83829fbd1c999b5122f
+ # via
+ # google-cloud-core
+ # google-cloud-pubsub
+ # google-cloud-storage
+google-auth==2.36.0 \
+ --hash=sha256:51a15d47028b66fd36e5c64a82d2d57480075bccc7da37cde257fc94177a61fb \
+ --hash=sha256:545e9618f2df0bcbb7dcbc45a546485b1212624716975a1ea5ae8149ce769ab1
+ # via
+ # google-api-core
+ # google-cloud-core
+ # google-cloud-pubsub
+ # google-cloud-storage
+google-cloud-core==2.4.1 \
+ --hash=sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073 \
+ --hash=sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61
+ # via google-cloud-storage
+google-cloud-pubsub==2.27.1 \
+ --hash=sha256:3ca8980c198a847ee464845ab60f05478d4819cf693c9950ee89da96f0b80a41 \
+ --hash=sha256:7119dbc5af4b915ecdfa1289919f791a432927eaaa7bbfbeb740e6d7020c181e
+ # via obs-common
+google-cloud-storage==2.18.2 \
+ --hash=sha256:97a4d45c368b7d401ed48c4fdfe86e1e1cb96401c9e199e419d289e2c0370166 \
+ --hash=sha256:aaf7acd70cdad9f274d29332673fcab98708d0e1f4dceb5a5356aaef06af4d99
+ # via obs-common
+google-crc32c==1.6.0 \
+ --hash=sha256:05e2d8c9a2f853ff116db9706b4a27350587f341eda835f46db3c0a8c8ce2f24 \
+ --hash=sha256:18e311c64008f1f1379158158bb3f0c8d72635b9eb4f9545f8cf990c5668e59d \
+ --hash=sha256:236c87a46cdf06384f614e9092b82c05f81bd34b80248021f729396a78e55d7e \
+ --hash=sha256:35834855408429cecf495cac67ccbab802de269e948e27478b1e47dfb6465e57 \
+ --hash=sha256:386122eeaaa76951a8196310432c5b0ef3b53590ef4c317ec7588ec554fec5d2 \
+ --hash=sha256:40b05ab32a5067525670880eb5d169529089a26fe35dce8891127aeddc1950e8 \
+ --hash=sha256:48abd62ca76a2cbe034542ed1b6aee851b6f28aaca4e6551b5599b6f3ef175cc \
+ --hash=sha256:50cf2a96da226dcbff8671233ecf37bf6e95de98b2a2ebadbfdf455e6d05df42 \
+ --hash=sha256:51c4f54dd8c6dfeb58d1df5e4f7f97df8abf17a36626a217f169893d1d7f3e9f \
+ --hash=sha256:5bcc90b34df28a4b38653c36bb5ada35671ad105c99cfe915fb5bed7ad6924aa \
+ --hash=sha256:62f6d4a29fea082ac4a3c9be5e415218255cf11684ac6ef5488eea0c9132689b \
+ --hash=sha256:6eceb6ad197656a1ff49ebfbbfa870678c75be4344feb35ac1edf694309413dc \
+ --hash=sha256:7aec8e88a3583515f9e0957fe4f5f6d8d4997e36d0f61624e70469771584c760 \
+ --hash=sha256:91ca8145b060679ec9176e6de4f89b07363d6805bd4760631ef254905503598d \
+ --hash=sha256:a184243544811e4a50d345838a883733461e67578959ac59964e43cca2c791e7 \
+ --hash=sha256:a9e4b426c3702f3cd23b933436487eb34e01e00327fac20c9aebb68ccf34117d \
+ --hash=sha256:bb0966e1c50d0ef5bc743312cc730b533491d60585a9a08f897274e57c3f70e0 \
+ --hash=sha256:bb8b3c75bd157010459b15222c3fd30577042a7060e29d42dabce449c087f2b3 \
+ --hash=sha256:bd5e7d2445d1a958c266bfa5d04c39932dc54093fa391736dbfdb0f1929c1fb3 \
+ --hash=sha256:c87d98c7c4a69066fd31701c4e10d178a648c2cac3452e62c6b24dc51f9fcc00 \
+ --hash=sha256:d2952396dc604544ea7476b33fe87faedc24d666fb0c2d5ac971a2b9576ab871 \
+ --hash=sha256:d8797406499f28b5ef791f339594b0b5fdedf54e203b5066675c406ba69d705c \
+ --hash=sha256:d9e9913f7bd69e093b81da4535ce27af842e7bf371cde42d1ae9e9bd382dc0e9 \
+ --hash=sha256:e2806553238cd076f0a55bddab37a532b53580e699ed8e5606d0de1f856b5205 \
+ --hash=sha256:ebab974b1687509e5c973b5c4b8b146683e101e102e17a86bd196ecaa4d099fc \
+ --hash=sha256:ed767bf4ba90104c1216b68111613f0d5926fb3780660ea1198fc469af410e9d \
+ --hash=sha256:f7a1fc29803712f80879b0806cb83ab24ce62fc8daf0569f2204a0cfd7f68ed4
+ # via
+ # google-cloud-storage
+ # google-resumable-media
+google-resumable-media==2.7.2 \
+ --hash=sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa \
+ --hash=sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0
+ # via google-cloud-storage
+googleapis-common-protos==1.66.0 \
+ --hash=sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c \
+ --hash=sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed
+ # via
+ # google-api-core
+ # grpc-google-iam-v1
+ # grpcio-status
+grpc-google-iam-v1==0.13.1 \
+ --hash=sha256:3ff4b2fd9d990965e410965253c0da6f66205d5a8291c4c31c6ebecca18a9001 \
+ --hash=sha256:c3e86151a981811f30d5e7330f271cee53e73bb87755e88cc3b6f0c7b5fe374e
+ # via google-cloud-pubsub
+grpcio==1.68.0 \
+ --hash=sha256:0d230852ba97654453d290e98d6aa61cb48fa5fafb474fb4c4298d8721809354 \
+ --hash=sha256:0efbbd849867e0e569af09e165363ade75cf84f5229b2698d53cf22c7a4f9e21 \
+ --hash=sha256:14331e5c27ed3545360464a139ed279aa09db088f6e9502e95ad4bfa852bb116 \
+ --hash=sha256:15327ab81131ef9b94cb9f45b5bd98803a179c7c61205c8c0ac9aff9d6c4e82a \
+ --hash=sha256:15377bce516b1c861c35e18eaa1c280692bf563264836cece693c0f169b48829 \
+ --hash=sha256:15fa1fe25d365a13bc6d52fcac0e3ee1f9baebdde2c9b3b2425f8a4979fccea1 \
+ --hash=sha256:18668e36e7f4045820f069997834e94e8275910b1f03e078a6020bd464cb2363 \
+ --hash=sha256:2af76ab7c427aaa26aa9187c3e3c42f38d3771f91a20f99657d992afada2294a \
+ --hash=sha256:2bddd04a790b69f7a7385f6a112f46ea0b34c4746f361ebafe9ca0be567c78e9 \
+ --hash=sha256:32a9cb4686eb2e89d97022ecb9e1606d132f85c444354c17a7dbde4a455e4a3b \
+ --hash=sha256:3ac7f10850fd0487fcce169c3c55509101c3bde2a3b454869639df2176b60a03 \
+ --hash=sha256:3b2b559beb2d433129441783e5f42e3be40a9e1a89ec906efabf26591c5cd415 \
+ --hash=sha256:4028b8e9a3bff6f377698587d642e24bd221810c06579a18420a17688e421af7 \
+ --hash=sha256:44bcbebb24363d587472089b89e2ea0ab2e2b4df0e4856ba4c0b087c82412121 \
+ --hash=sha256:46a2d74d4dd8993151c6cd585594c082abe74112c8e4175ddda4106f2ceb022f \
+ --hash=sha256:4df81d78fd1646bf94ced4fb4cd0a7fe2e91608089c522ef17bc7db26e64effd \
+ --hash=sha256:4e300e6978df0b65cc2d100c54e097c10dfc7018b9bd890bbbf08022d47f766d \
+ --hash=sha256:4f1931c7aa85be0fa6cea6af388e576f3bf6baee9e5d481c586980c774debcb4 \
+ --hash=sha256:50992f214264e207e07222703c17d9cfdcc2c46ed5a1ea86843d440148ebbe10 \
+ --hash=sha256:55d3b52fd41ec5772a953612db4e70ae741a6d6ed640c4c89a64f017a1ac02b5 \
+ --hash=sha256:5a180328e92b9a0050958ced34dddcb86fec5a8b332f5a229e353dafc16cd332 \
+ --hash=sha256:619b5d0f29f4f5351440e9343224c3e19912c21aeda44e0c49d0d147a8d01544 \
+ --hash=sha256:6b2f98165ea2790ea159393a2246b56f580d24d7da0d0342c18a085299c40a75 \
+ --hash=sha256:6f9c7ad1a23e1047f827385f4713b5b8c6c7d325705be1dd3e31fb00dcb2f665 \
+ --hash=sha256:79f81b7fbfb136247b70465bd836fa1733043fdee539cd6031cb499e9608a110 \
+ --hash=sha256:7e0a3e72c0e9a1acab77bef14a73a416630b7fd2cbd893c0a873edc47c42c8cd \
+ --hash=sha256:7e7483d39b4a4fddb9906671e9ea21aaad4f031cdfc349fec76bdfa1e404543a \
+ --hash=sha256:88fb2925789cfe6daa20900260ef0a1d0a61283dfb2d2fffe6194396a354c618 \
+ --hash=sha256:8af6137cc4ae8e421690d276e7627cfc726d4293f6607acf9ea7260bd8fc3d7d \
+ --hash=sha256:8b0ff09c81e3aded7a183bc6473639b46b6caa9c1901d6f5e2cba24b95e59e30 \
+ --hash=sha256:8c73f9fbbaee1a132487e31585aa83987ddf626426d703ebcb9a528cf231c9b1 \
+ --hash=sha256:99f06232b5c9138593ae6f2e355054318717d32a9c09cdc5a2885540835067a1 \
+ --hash=sha256:9fe1b141cda52f2ca73e17d2d3c6a9f3f3a0c255c216b50ce616e9dca7e3441d \
+ --hash=sha256:a17278d977746472698460c63abf333e1d806bd41f2224f90dbe9460101c9796 \
+ --hash=sha256:a59f5822f9459bed098ffbceb2713abbf7c6fd13f2b9243461da5c338d0cd6c3 \
+ --hash=sha256:a6213d2f7a22c3c30a479fb5e249b6b7e648e17f364598ff64d08a5136fe488b \
+ --hash=sha256:a831dcc343440969aaa812004685ed322cdb526cd197112d0db303b0da1e8659 \
+ --hash=sha256:afbf45a62ba85a720491bfe9b2642f8761ff348006f5ef67e4622621f116b04a \
+ --hash=sha256:b0cf343c6f4f6aa44863e13ec9ddfe299e0be68f87d68e777328bff785897b05 \
+ --hash=sha256:c03d89df516128febc5a7e760d675b478ba25802447624edf7aa13b1e7b11e2a \
+ --hash=sha256:c1245651f3c9ea92a2db4f95d37b7597db6b246d5892bca6ee8c0e90d76fb73c \
+ --hash=sha256:cc5f0a4f5904b8c25729a0498886b797feb817d1fd3812554ffa39551112c161 \
+ --hash=sha256:dba037ff8d284c8e7ea9a510c8ae0f5b016004f13c3648f72411c464b67ff2fb \
+ --hash=sha256:def1a60a111d24376e4b753db39705adbe9483ef4ca4761f825639d884d5da78 \
+ --hash=sha256:e0d2f68eaa0a755edd9a47d40e50dba6df2bceda66960dee1218da81a2834d27 \
+ --hash=sha256:e0d30f3fee9372796f54d3100b31ee70972eaadcc87314be369360248a3dcffe \
+ --hash=sha256:e18589e747c1e70b60fab6767ff99b2d0c359ea1db8a2cb524477f93cdbedf5b \
+ --hash=sha256:e1e7ed311afb351ff0d0e583a66fcb39675be112d61e7cfd6c8269884a98afbc \
+ --hash=sha256:e46541de8425a4d6829ac6c5d9b16c03c292105fe9ebf78cb1c31e8d242f9155 \
+ --hash=sha256:e694b5928b7b33ca2d3b4d5f9bf8b5888906f181daff6b406f4938f3a997a490 \
+ --hash=sha256:f60fa2adf281fd73ae3a50677572521edca34ba373a45b457b5ebe87c2d01e1d \
+ --hash=sha256:f84890b205692ea813653ece4ac9afa2139eae136e419231b0eec7c39fdbe4c2 \
+ --hash=sha256:f8f695d9576ce836eab27ba7401c60acaf9ef6cf2f70dfe5462055ba3df02cc3 \
+ --hash=sha256:fc05759ffbd7875e0ff2bd877be1438dfe97c9312bbc558c8284a9afa1d0f40e \
+ --hash=sha256:fd2c2d47969daa0e27eadaf15c13b5e92605c5e5953d23c06d0b5239a2f176d3
+ # via
+ # google-api-core
+ # google-cloud-pubsub
+ # googleapis-common-protos
+ # grpc-google-iam-v1
+ # grpcio-status
+grpcio-status==1.68.0 \
+ --hash=sha256:0a71b15d989f02df803b4ba85c5bf1f43aeaa58ac021e5f9974b8cadc41f784d \
+ --hash=sha256:8369823de22ab6a2cddb3804669c149ae7a71819e127c2dca7c2322028d52bea
+ # via
+ # google-api-core
+ # google-cloud-pubsub
gunicorn==23.0.0 \
--hash=sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d \
--hash=sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec
@@ -185,6 +339,10 @@ imagesize==1.3.0 \
--hash=sha256:1db2f82529e53c3e929e8926a1fa9235aa82d0bd0c580359c67ec31b2fddaa8c \
--hash=sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d
# via sphinx
+importlib-metadata==8.5.0 \
+ --hash=sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b \
+ --hash=sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7
+ # via opentelemetry-api
iniconfig==1.1.1 \
--hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \
--hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32
@@ -322,6 +480,24 @@ msgpack==1.1.0 \
--hash=sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd \
--hash=sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788
# via -r requirements.in
+obs-common==2024.11.7 \
+ --hash=sha256:be6f17329f037abf0b47c362ce2b084fcebff0c299e4791d4c3256f279a1880c
+ # via -r requirements.in
+opentelemetry-api==1.28.2 \
+ --hash=sha256:6fcec89e265beb258fe6b1acaaa3c8c705a934bd977b9f534a2b7c0d2d4275a6 \
+ --hash=sha256:ecdc70c7139f17f9b0cf3742d57d7020e3e8315d6cffcdf1a12a905d45b19cc0
+ # via
+ # google-cloud-pubsub
+ # opentelemetry-sdk
+ # opentelemetry-semantic-conventions
+opentelemetry-sdk==1.28.2 \
+ --hash=sha256:5fed24c5497e10df30282456fe2910f83377797511de07d14cec0d3e0a1a3110 \
+ --hash=sha256:93336c129556f1e3ccd21442b94d3521759541521861b2214c499571b85cb71b
+ # via google-cloud-pubsub
+opentelemetry-semantic-conventions==0.49b2 \
+ --hash=sha256:44e32ce6a5bb8d7c0c617f84b9dc1c8deda1045a07dc16a688cc7cbeab679997 \
+ --hash=sha256:51e7e1d0daa958782b6c2a8ed05e5f0e7dd0716fc327ac058777b8659649ee54
+ # via opentelemetry-sdk
packaging==23.0 \
--hash=sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2 \
--hash=sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97
@@ -338,6 +514,41 @@ pluggy==1.5.0 \
--hash=sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1 \
--hash=sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669
# via pytest
+proto-plus==1.25.0 \
+ --hash=sha256:c91fc4a65074ade8e458e95ef8bac34d4008daa7cce4a12d6707066fca648961 \
+ --hash=sha256:fbb17f57f7bd05a68b7707e745e26528b0b3c34e378db91eef93912c54982d91
+ # via
+ # google-api-core
+ # google-cloud-pubsub
+protobuf==5.28.3 \
+ --hash=sha256:0c4eec6f987338617072592b97943fdbe30d019c56126493111cf24344c1cc24 \
+ --hash=sha256:135658402f71bbd49500322c0f736145731b16fc79dc8f367ab544a17eab4535 \
+ --hash=sha256:27b246b3723692bf1068d5734ddaf2fccc2cdd6e0c9b47fe099244d80200593b \
+ --hash=sha256:3e6101d095dfd119513cde7259aa703d16c6bbdfae2554dfe5cfdbe94e32d548 \
+ --hash=sha256:3fa2de6b8b29d12c61911505d893afe7320ce7ccba4df913e2971461fa36d584 \
+ --hash=sha256:64badbc49180a5e401f373f9ce7ab1d18b63f7dd4a9cdc43c92b9f0b481cef7b \
+ --hash=sha256:70585a70fc2dd4818c51287ceef5bdba6387f88a578c86d47bb34669b5552c36 \
+ --hash=sha256:712319fbdddb46f21abb66cd33cb9e491a5763b2febd8f228251add221981135 \
+ --hash=sha256:91fba8f445723fcf400fdbe9ca796b19d3b1242cd873907979b9ed71e4afe868 \
+ --hash=sha256:a3f6857551e53ce35e60b403b8a27b0295f7d6eb63d10484f12bc6879c715687 \
+ --hash=sha256:cee1757663fa32a1ee673434fcf3bf24dd54763c79690201208bafec62f19eed
+ # via
+ # google-api-core
+ # google-cloud-pubsub
+ # googleapis-common-protos
+ # grpc-google-iam-v1
+ # grpcio-status
+ # proto-plus
+pyasn1==0.6.1 \
+ --hash=sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629 \
+ --hash=sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034
+ # via
+ # pyasn1-modules
+ # rsa
+pyasn1-modules==0.4.1 \
+ --hash=sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd \
+ --hash=sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c
+ # via google-auth
pycparser==2.21 \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
@@ -423,6 +634,9 @@ requests==2.32.3 \
# via
# -r requirements.in
# datadog
+ # google-api-core
+ # google-cloud-storage
+ # obs-common
# requests-mock
# sphinx
requests-mock==1.12.1 \
@@ -532,6 +746,10 @@ rpds-py==0.13.2 \
# via
# jsonschema
# referencing
+rsa==4.9 \
+ --hash=sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7 \
+ --hash=sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21
+ # via google-auth
ruff==0.7.4 \
--hash=sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05 \
--hash=sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109 \
@@ -558,6 +776,7 @@ sentry-sdk==2.7.1 \
# via
# -r requirements.in
# fillmore
+ # obs-common
six==1.16.0 \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
@@ -617,6 +836,10 @@ symbolic==12.12.1 \
--hash=sha256:9b856c67f35fbc525caf310c21e5dd774f399cd437dddada0d6441fc968555d5 \
--hash=sha256:c7083e50fdeb31c9cc9dcda406c9bea5d4607d87271ab182c338a330dd41c9c4
# via -r requirements.in
+typing-extensions==4.12.2 \
+ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
+ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8
+ # via opentelemetry-sdk
urllib3==2.2.3 \
--hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \
--hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9
@@ -632,6 +855,82 @@ wheel==0.38.4 \
--hash=sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac \
--hash=sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8
# via pip-tools
+wrapt==1.16.0 \
+ --hash=sha256:0d2691979e93d06a95a26257adb7bfd0c93818e89b1406f5a28f36e0d8c1e1fc \
+ --hash=sha256:14d7dc606219cdd7405133c713f2c218d4252f2a469003f8c46bb92d5d095d81 \
+ --hash=sha256:1a5db485fe2de4403f13fafdc231b0dbae5eca4359232d2efc79025527375b09 \
+ --hash=sha256:1acd723ee2a8826f3d53910255643e33673e1d11db84ce5880675954183ec47e \
+ --hash=sha256:1ca9b6085e4f866bd584fb135a041bfc32cab916e69f714a7d1d397f8c4891ca \
+ --hash=sha256:1dd50a2696ff89f57bd8847647a1c363b687d3d796dc30d4dd4a9d1689a706f0 \
+ --hash=sha256:2076fad65c6736184e77d7d4729b63a6d1ae0b70da4868adeec40989858eb3fb \
+ --hash=sha256:2a88e6010048489cda82b1326889ec075a8c856c2e6a256072b28eaee3ccf487 \
+ --hash=sha256:3ebf019be5c09d400cf7b024aa52b1f3aeebeff51550d007e92c3c1c4afc2a40 \
+ --hash=sha256:418abb18146475c310d7a6dc71143d6f7adec5b004ac9ce08dc7a34e2babdc5c \
+ --hash=sha256:43aa59eadec7890d9958748db829df269f0368521ba6dc68cc172d5d03ed8060 \
+ --hash=sha256:44a2754372e32ab315734c6c73b24351d06e77ffff6ae27d2ecf14cf3d229202 \
+ --hash=sha256:490b0ee15c1a55be9c1bd8609b8cecd60e325f0575fc98f50058eae366e01f41 \
+ --hash=sha256:49aac49dc4782cb04f58986e81ea0b4768e4ff197b57324dcbd7699c5dfb40b9 \
+ --hash=sha256:5eb404d89131ec9b4f748fa5cfb5346802e5ee8836f57d516576e61f304f3b7b \
+ --hash=sha256:5f15814a33e42b04e3de432e573aa557f9f0f56458745c2074952f564c50e664 \
+ --hash=sha256:5f370f952971e7d17c7d1ead40e49f32345a7f7a5373571ef44d800d06b1899d \
+ --hash=sha256:66027d667efe95cc4fa945af59f92c5a02c6f5bb6012bff9e60542c74c75c362 \
+ --hash=sha256:66dfbaa7cfa3eb707bbfcd46dab2bc6207b005cbc9caa2199bcbc81d95071a00 \
+ --hash=sha256:685f568fa5e627e93f3b52fda002c7ed2fa1800b50ce51f6ed1d572d8ab3e7fc \
+ --hash=sha256:6906c4100a8fcbf2fa735f6059214bb13b97f75b1a61777fcf6432121ef12ef1 \
+ --hash=sha256:6a42cd0cfa8ffc1915aef79cb4284f6383d8a3e9dcca70c445dcfdd639d51267 \
+ --hash=sha256:6dcfcffe73710be01d90cae08c3e548d90932d37b39ef83969ae135d36ef3956 \
+ --hash=sha256:6f6eac2360f2d543cc875a0e5efd413b6cbd483cb3ad7ebf888884a6e0d2e966 \
+ --hash=sha256:72554a23c78a8e7aa02abbd699d129eead8b147a23c56e08d08dfc29cfdddca1 \
+ --hash=sha256:73870c364c11f03ed072dda68ff7aea6d2a3a5c3fe250d917a429c7432e15228 \
+ --hash=sha256:73aa7d98215d39b8455f103de64391cb79dfcad601701a3aa0dddacf74911d72 \
+ --hash=sha256:75ea7d0ee2a15733684badb16de6794894ed9c55aa5e9903260922f0482e687d \
+ --hash=sha256:7bd2d7ff69a2cac767fbf7a2b206add2e9a210e57947dd7ce03e25d03d2de292 \
+ --hash=sha256:807cc8543a477ab7422f1120a217054f958a66ef7314f76dd9e77d3f02cdccd0 \
+ --hash=sha256:8e9723528b9f787dc59168369e42ae1c3b0d3fadb2f1a71de14531d321ee05b0 \
+ --hash=sha256:9090c9e676d5236a6948330e83cb89969f433b1943a558968f659ead07cb3b36 \
+ --hash=sha256:9153ed35fc5e4fa3b2fe97bddaa7cbec0ed22412b85bcdaf54aeba92ea37428c \
+ --hash=sha256:9159485323798c8dc530a224bd3ffcf76659319ccc7bbd52e01e73bd0241a0c5 \
+ --hash=sha256:941988b89b4fd6b41c3f0bfb20e92bd23746579736b7343283297c4c8cbae68f \
+ --hash=sha256:94265b00870aa407bd0cbcfd536f17ecde43b94fb8d228560a1e9d3041462d73 \
+ --hash=sha256:98b5e1f498a8ca1858a1cdbffb023bfd954da4e3fa2c0cb5853d40014557248b \
+ --hash=sha256:9b201ae332c3637a42f02d1045e1d0cccfdc41f1f2f801dafbaa7e9b4797bfc2 \
+ --hash=sha256:a0ea261ce52b5952bf669684a251a66df239ec6d441ccb59ec7afa882265d593 \
+ --hash=sha256:a33a747400b94b6d6b8a165e4480264a64a78c8a4c734b62136062e9a248dd39 \
+ --hash=sha256:a452f9ca3e3267cd4d0fcf2edd0d035b1934ac2bd7e0e57ac91ad6b95c0c6389 \
+ --hash=sha256:a86373cf37cd7764f2201b76496aba58a52e76dedfaa698ef9e9688bfd9e41cf \
+ --hash=sha256:ac83a914ebaf589b69f7d0a1277602ff494e21f4c2f743313414378f8f50a4cf \
+ --hash=sha256:aefbc4cb0a54f91af643660a0a150ce2c090d3652cf4052a5397fb2de549cd89 \
+ --hash=sha256:b3646eefa23daeba62643a58aac816945cadc0afaf21800a1421eeba5f6cfb9c \
+ --hash=sha256:b47cfad9e9bbbed2339081f4e346c93ecd7ab504299403320bf85f7f85c7d46c \
+ --hash=sha256:b935ae30c6e7400022b50f8d359c03ed233d45b725cfdd299462f41ee5ffba6f \
+ --hash=sha256:bb2dee3874a500de01c93d5c71415fcaef1d858370d405824783e7a8ef5db440 \
+ --hash=sha256:bc57efac2da352a51cc4658878a68d2b1b67dbe9d33c36cb826ca449d80a8465 \
+ --hash=sha256:bf5703fdeb350e36885f2875d853ce13172ae281c56e509f4e6eca049bdfb136 \
+ --hash=sha256:c31f72b1b6624c9d863fc095da460802f43a7c6868c5dda140f51da24fd47d7b \
+ --hash=sha256:c5cd603b575ebceca7da5a3a251e69561bec509e0b46e4993e1cac402b7247b8 \
+ --hash=sha256:d2efee35b4b0a347e0d99d28e884dfd82797852d62fcd7ebdeee26f3ceb72cf3 \
+ --hash=sha256:d462f28826f4657968ae51d2181a074dfe03c200d6131690b7d65d55b0f360f8 \
+ --hash=sha256:d5e49454f19ef621089e204f862388d29e6e8d8b162efce05208913dde5b9ad6 \
+ --hash=sha256:da4813f751142436b075ed7aa012a8778aa43a99f7b36afe9b742d3ed8bdc95e \
+ --hash=sha256:db2e408d983b0e61e238cf579c09ef7020560441906ca990fe8412153e3b291f \
+ --hash=sha256:db98ad84a55eb09b3c32a96c576476777e87c520a34e2519d3e59c44710c002c \
+ --hash=sha256:dbed418ba5c3dce92619656802cc5355cb679e58d0d89b50f116e4a9d5a9603e \
+ --hash=sha256:dcdba5c86e368442528f7060039eda390cc4091bfd1dca41e8046af7c910dda8 \
+ --hash=sha256:decbfa2f618fa8ed81c95ee18a387ff973143c656ef800c9f24fb7e9c16054e2 \
+ --hash=sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020 \
+ --hash=sha256:eb1b046be06b0fce7249f1d025cd359b4b80fc1c3e24ad9eca33e0dcdb2e4a35 \
+ --hash=sha256:eb6e651000a19c96f452c85132811d25e9264d836951022d6e81df2fff38337d \
+ --hash=sha256:ed867c42c268f876097248e05b6117a65bcd1e63b779e916fe2e33cd6fd0d3c3 \
+ --hash=sha256:edfad1d29c73f9b863ebe7082ae9321374ccb10879eeabc84ba3b69f2579d537 \
+ --hash=sha256:f2058f813d4f2b5e3a9eb2eb3faf8f1d99b81c3e51aeda4b168406443e8ba809 \
+ --hash=sha256:f6b2d0c6703c988d334f297aa5df18c45e97b0af3679bb75059e0e0bd8b1069d \
+ --hash=sha256:f8212564d49c50eb4565e502814f694e240c55551a5f1bc841d4fcaabb0a9b8a \
+ --hash=sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4
+ # via deprecated
+zipp==3.21.0 \
+ --hash=sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4 \
+ --hash=sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931
+ # via importlib-metadata
# WARNING: The following packages were not pinned, but pip requires them to be
# pinned when the requirements file includes hashes and the requirement is not