From 372006df3eb2d37b78f77ab5c7737e840492dfab Mon Sep 17 00:00:00 2001 From: root Date: Mon, 9 Dec 2024 20:15:43 +0000 Subject: [PATCH 1/5] starting workcell manager --- .devcontainer/DevContainer.Dockerfile | 12 +- .devcontainer/devcontainer.compose.yaml | 48 +- .devcontainer/devcontainer.json | 106 +- .dockerignore | 2 +- .github/dependabot.yml | 24 +- .justfile | 82 +- .pre-commit-config.yaml | 58 +- README.md | 34 +- .../madsci_client/madsci/client/__init__.py | 2 +- .../madsci/client/cli/__init__.py | 82 +- .../madsci/client/cli/lab_cli.py | 530 +-- .../madsci/client/cli/module_cli.py | 564 +-- .../madsci/client/cli/node_cli.py | 894 ++--- .../madsci/client/cli/resources_cli.py | 728 ++-- .../madsci/client/cli/workcell_cli.py | 522 +-- .../madsci/client/node/__init__.py | 28 +- .../client/node/abstract_node_client.py | 158 +- .../madsci/client/node/rest_node_client.py | 302 +- madsci/madsci_client/pyproject.toml | 70 +- .../madsci_common/madsci/common/__init__.py | 2 +- .../madsci/common/definition_loaders.py | 280 +- madsci/madsci_common/madsci/common/events.py | 122 +- .../madsci_common/madsci/common/exceptions.py | 26 +- .../madsci/common/types/__init__.py | 2 +- .../madsci/common/types/action_types.py | 702 ++-- .../common/types/admin_command_types.py | 70 +- .../madsci/common/types/auth_types.py | 196 +- .../madsci/common/types/base_types.py | 180 +- .../madsci/common/types/event_types.py | 212 +- .../madsci/common/types/location_types.py | 128 +- .../madsci/common/types/module_types.py | 400 +-- .../madsci/common/types/node_types.py | 540 +-- .../madsci/common/types/resource_types.py | 1436 ++++---- .../madsci/common/types/squid_types.py | 274 +- .../madsci/common/types/step_types.py | 160 +- .../madsci/common/types/validators.py | 44 +- .../madsci/common/types/workcell_types.py | 153 +- .../madsci/common/types/workflow_types.py | 150 + madsci/madsci_common/madsci/common/utils.py | 804 ++--- madsci/madsci_common/pdm.lock | 3124 ++++++++--------- 
madsci/madsci_common/pyproject.toml | 82 +- .../madsci_module/madsci/module/__init__.py | 2 +- .../madsci/module/abstract_module.py | 1160 +++--- .../madsci/module/rest_module.py | 654 ++-- madsci/madsci_module/pyproject.toml | 72 +- .../madsci/resource_manager/__init__.py | 2 +- .../resource_manager/resource_server.py | 66 +- .../madsci/resource_manager/types.py | 72 +- madsci/madsci_resource_manager/pyproject.toml | 70 +- madsci/madsci_squid/madsci/squid/__init__.py | 2 +- .../madsci_squid/madsci/squid/lab_server.py | 50 +- madsci/madsci_squid/pyproject.toml | 70 +- madsci/madsci_workcell_manager/README.md | 0 .../madsci/tests/workflow_tests.py | 19 + .../madsci/workcell_manager/__init__.py | 1 + .../madsci/workcell_manager/redis_handler.py | 220 ++ .../madsci/workcell_manager/scheduler.py | 59 + .../workcell_manager_types.py | 23 + .../workcell_manager/workcell_server.py | 126 + .../madsci/workcell_manager/workflow_utils.py | 103 + madsci/madsci_workcell_manager/pyproject.toml | 40 + pdm.lock | 1827 +++++----- pyproject.toml | 35 +- ruff.toml | 236 +- tests/.gitignore | 2 +- tests/example/example_lab.lab.yaml | 22 +- .../workcells/test_workcell.workcell.yaml | 17 +- .../workflows/test_workflow.workflow.yaml | 18 + tests/example_resources.yaml | 220 +- tests/test_cli.ipynb | 288 +- tests/test_module.ipynb | 158 +- tests/test_modules/liquidhandler.module.yaml | 76 +- tests/test_modules/liquidhandler.py | 106 +- .../test_modules/nodes/default.node.info.yaml | 118 +- tests/test_modules/nodes/default.node.yaml | 50 +- 75 files changed, 10197 insertions(+), 9120 deletions(-) create mode 100644 madsci/madsci_common/madsci/common/types/workflow_types.py create mode 100644 madsci/madsci_workcell_manager/README.md create mode 100644 madsci/madsci_workcell_manager/madsci/tests/workflow_tests.py create mode 100644 madsci/madsci_workcell_manager/madsci/workcell_manager/__init__.py create mode 100644 madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py 
create mode 100644 madsci/madsci_workcell_manager/madsci/workcell_manager/scheduler.py create mode 100644 madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_manager_types.py create mode 100644 madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py create mode 100644 madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py create mode 100644 madsci/madsci_workcell_manager/pyproject.toml create mode 100644 tests/example/workflows/test_workflow.workflow.yaml diff --git a/.devcontainer/DevContainer.Dockerfile b/.devcontainer/DevContainer.Dockerfile index e1b06a5..260a292 100644 --- a/.devcontainer/DevContainer.Dockerfile +++ b/.devcontainer/DevContainer.Dockerfile @@ -1,6 +1,6 @@ -# Note: You can use any Debian/Ubuntu based image you want. -FROM mcr.microsoft.com/devcontainers/base:bullseye - -# [Optional] Uncomment this section to install additional OS packages. -RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ - && apt-get -y install --no-install-recommends vim +# Note: You can use any Debian/Ubuntu based image you want. +FROM mcr.microsoft.com/devcontainers/base:bullseye + +# [Optional] Uncomment this section to install additional OS packages. +RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ + && apt-get -y install --no-install-recommends vim diff --git a/.devcontainer/devcontainer.compose.yaml b/.devcontainer/devcontainer.compose.yaml index d6a8427..6cebfd1 100644 --- a/.devcontainer/devcontainer.compose.yaml +++ b/.devcontainer/devcontainer.compose.yaml @@ -1,24 +1,24 @@ -services: - dev: - build: - context: . - dockerfile: DevContainer.Dockerfile - - volumes: - # Forwards the local Docker socket to the container. - - /var/run/docker.sock:/var/run/docker-host.sock - # Update this to wherever you want VS Code to mount the folder of your project - - ../..:/workspaces:cached - - # Overrides default command so things don't shut down after the process ends. 
- entrypoint: /usr/local/share/docker-init.sh - command: sleep infinity - - # Uncomment the next four lines if you will use a ptrace-based debuggers like C++, Go, and Rust. - # cap_add: - # - SYS_PTRACE - # security_opt: - # - seccomp:unconfined - - # Use "forwardPorts" in **devcontainer.json** to forward an app port locally. - # (Adding the "ports" property to this file will not forward from a Codespace.) +services: + dev: + build: + context: . + dockerfile: DevContainer.Dockerfile + + volumes: + # Forwards the local Docker socket to the container. + - /var/run/docker.sock:/var/run/docker-host.sock + # Update this to wherever you want VS Code to mount the folder of your project + - ../..:/workspaces:cached + + # Overrides default command so things don't shut down after the process ends. + entrypoint: /usr/local/share/docker-init.sh + command: sleep infinity + + # Uncomment the next four lines if you will use a ptrace-based debuggers like C++, Go, and Rust. + # cap_add: + # - SYS_PTRACE + # security_opt: + # - seccomp:unconfined + + # Use "forwardPorts" in **devcontainer.json** to forward an app port locally. + # (Adding the "ports" property to this file will not forward from a Codespace.) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 0f74ba3..3dfd071 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,53 +1,53 @@ -{ -// For format details, see https://aka.ms/devcontainer.json. For config options, see the -// README at: https://github.com/devcontainers/templates/tree/main/src/docker-outside-of-docker-compose - "name": "MADSci Dev Container", - "dockerComposeFile": "devcontainer.compose.yaml", - "service": "dev", - "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}", - - // Use this environment variable if you need to bind mount your local source code into a new container. 
- "remoteEnv": { - "LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}" - }, - - "customizations": { - "vscode": { - "extensions": [ - "ms-python.python", - "donjayamanne.python-environment-manager", - "charliermarsh.ruff", - "VisualStudioExptTeam.vscodeintellicode", - "aaron-bond.better-comments", - "vuetifyjs.vuetify-vscode", - "christian-kohler.path-intellisense", - "nefrob.vscode-just", - "Vue.volar", - "redhat.vscode-yaml", - "KevinRose.vsc-python-indent", - "ms-python.vscode-pylance", - "ms-toolsai.jupyter" - ] - } - }, - - "features": { - "ghcr.io/devcontainers/features/docker-outside-of-docker:1": {}, - "ghcr.io/devcontainers/features/python:1": {}, - "ghcr.io/guiyomh/features/just:0": {}, - "ghcr.io/devcontainers-extra/features/act:1": {}, - "ghcr.io/devcontainers-extra/features/actionlint:1": {}, - "ghcr.io/devcontainers-extra/features/pdm:2": {}, - "ghcr.io/devcontainers-extra/features/vue-cli:2": {}, - "ghcr.io/devcontainers-extra/features/pre-commit:2": {}, - "ghcr.io/devcontainers-extra/features/ruff:1": {} - }, - // Use 'forwardPorts' to make a list of ports inside the container available locally. - // "forwardPorts": [8000], - - // Use 'postCreateCommand' to run commands after the container is created. - "postCreateCommand": "just init" - - // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. - // "remoteUser": "root" -} +{ +// For format details, see https://aka.ms/devcontainer.json. For config options, see the +// README at: https://github.com/devcontainers/templates/tree/main/src/docker-outside-of-docker-compose + "name": "MADSci Dev Container", + "dockerComposeFile": "devcontainer.compose.yaml", + "service": "dev", + "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}", + + // Use this environment variable if you need to bind mount your local source code into a new container. 
+ "remoteEnv": { + "LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}" + }, + + "customizations": { + "vscode": { + "extensions": [ + "ms-python.python", + "donjayamanne.python-environment-manager", + "charliermarsh.ruff", + "VisualStudioExptTeam.vscodeintellicode", + "aaron-bond.better-comments", + // "vuetifyjs.vuetify-vscode", + "christian-kohler.path-intellisense", + "nefrob.vscode-just", + // "Vue.volar", + "redhat.vscode-yaml", + "KevinRose.vsc-python-indent", + "ms-python.vscode-pylance", + "ms-toolsai.jupyter" + ] + } + }, + + "features": { + "ghcr.io/devcontainers/features/docker-outside-of-docker:1": {}, + "ghcr.io/devcontainers/features/python:1": {}, + "ghcr.io/guiyomh/features/just:0": {}, + "ghcr.io/devcontainers-extra/features/act:1": {}, + "ghcr.io/devcontainers-extra/features/actionlint:1": {}, + "ghcr.io/devcontainers-extra/features/pdm:2": {}, + // "ghcr.io/devcontainers-extra/features/vue-cli:2": {}, + "ghcr.io/devcontainers-extra/features/pre-commit:2": {}, + "ghcr.io/devcontainers-extra/features/ruff:1": {} + }, + // Use 'forwardPorts' to make a list of ports inside the container available locally. + // "forwardPorts": [8000], + + // Use 'postCreateCommand' to run commands after the container is created. + "postCreateCommand": "just init" + + // Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. + // "remoteUser": "root" +} diff --git a/.dockerignore b/.dockerignore index cf70988..4e6927f 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1 +1 @@ -**/node_modules +**/node_modules diff --git a/.github/dependabot.yml b/.github/dependabot.yml index f33a02c..20cb428 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,12 +1,12 @@ -# To get started with Dependabot version updates, you'll need to specify which -# package ecosystems to update and where the package manifests are located. 
-# Please see the documentation for more information: -# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates -# https://containers.dev/guide/dependabot - -version: 2 -updates: - - package-ecosystem: "devcontainers" - directory: "/" - schedule: - interval: weekly +# To get started with Dependabot version updates, you'll need to specify which +# package ecosystems to update and where the package manifests are located. +# Please see the documentation for more information: +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates +# https://containers.dev/guide/dependabot + +version: 2 +updates: + - package-ecosystem: "devcontainers" + directory: "/" + schedule: + interval: weekly diff --git a/.justfile b/.justfile index 5c9aa17..1aaa2b4 100644 --- a/.justfile +++ b/.justfile @@ -1,41 +1,41 @@ -# List available commands -default: - @just --list --justfile {{justfile()}} - -# initialize the project -init: hooks - @which pdm || echo "pdm not found, you'll need to install it: https://github.com/pdm-project/pdm" - @pdm install - @#test -e .env || cp .env.example .env - -# Install the pre-commit hooks -hooks: - @pre-commit install - -# Run the pre-commit checks -checks: - @pre-commit run --all-files || { echo "Checking fixes\n" ; pre-commit run --all-files; } -check: checks - - -# Python tasks - -# Update the pdm version -pdm-update: - @pdm self update - -# Install the default dependencies -pdm-install: - @pdm install - -# Install a specific group of dependencies -pdm-install-group group: - @pdm install --group {{group}} - -# Install all dependencies -pdm-install-all: - @just pdm-install-group :all - -# Build the python package -pdm-build: - @pdm build +# List available commands +default: + @just --list --justfile {{justfile()}} + +# initialize the project +init: hooks + @which pdm || echo "pdm not found, you'll need to install it: https://github.com/pdm-project/pdm" + @pdm 
install + @#test -e .env || cp .env.example .env + +# Install the pre-commit hooks +hooks: + @pre-commit install + +# Run the pre-commit checks +checks: + @pre-commit run --all-files || { echo "Checking fixes\n" ; pre-commit run --all-files; } +check: checks + + +# Python tasks + +# Update the pdm version +pdm-update: + @pdm self update + +# Install the default dependencies +pdm-install: + @pdm install + +# Install a specific group of dependencies +pdm-install-group group: + @pdm install --group {{group}} + +# Install all dependencies +pdm-install-all: + @just pdm-install-group :all + +# Build the python package +pdm-build: + @pdm build diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 056d5f3..ec1586e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,29 +1,29 @@ -repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v5.0.0 - hooks: - - id: check-yaml - - id: check-toml - - id: check-ast - - id: check-merge-conflict - - id: check-added-large-files - - id: mixed-line-ending - - id: end-of-file-fixer - - id: trailing-whitespace - - repo: https://github.com/kynan/nbstripout - rev: 0.8.1 - hooks: - - id: nbstripout - - repo: https://github.com/astral-sh/ruff-pre-commit - # Ruff version. - rev: v0.8.1 - hooks: - # Run the linter. - - id: ruff - args: [--fix] - # Run the formatter. - - id: ruff-format - - repo: https://gitlab.com/bmares/check-json5 - rev: v1.0.0 - hooks: - - id: check-json5 +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - id: check-yaml + - id: check-toml + - id: check-ast + - id: check-merge-conflict + - id: check-added-large-files + - id: mixed-line-ending + - id: end-of-file-fixer + - id: trailing-whitespace + - repo: https://github.com/kynan/nbstripout + rev: 0.8.1 + hooks: + - id: nbstripout + - repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.8.1 + hooks: + # Run the linter. 
+ - id: ruff + args: [--fix] + # Run the formatter. + - id: ruff-format + - repo: https://gitlab.com/bmares/check-json5 + rev: v1.0.0 + hooks: + - id: check-json5 diff --git a/README.md b/README.md index c57b87b..89f2413 100644 --- a/README.md +++ b/README.md @@ -1,17 +1,17 @@ -# Modular Autonomous Discovery for Science (MADSci) - -## Overview - -MADSci is a modular, autonomous, and scalable framework for scientific discovery and experimentation. - -## Components - -- [Squid](./src/madsci.squid/README.md): The Workcell Management Engine. -- [Types](./src/madsci.types/README.md): The Type Definition Library for MADSci. -- [PyClient](./src/madsci.pyclient/README.md): The Python Client for MADSci. -- [Module](./src/madsci.module/README.md): The Module Library for integrating devices. -- [Server](./src/madsci.server/README.md): The REST API Server. -- [CLI](./src/madsci.cli/README.md): The Command Line Interface. -- [Dashboard](./src/madsci.dashboard/README.md): The web-based Dashboard and management interface. -- [Resources](./src/madsci.resources/README.md): The Resource Library for managing resources. -- [Events](./src/madsci.events/README.md): The Event Library for managing events. +# Modular Autonomous Discovery for Science (MADSci) + +## Overview + +MADSci is a modular, autonomous, and scalable framework for scientific discovery and experimentation. + +## Components + +- [Squid](./src/madsci.squid/README.md): The Workcell Management Engine. +- [Types](./src/madsci.types/README.md): The Type Definition Library for MADSci. +- [PyClient](./src/madsci.pyclient/README.md): The Python Client for MADSci. +- [Module](./src/madsci.module/README.md): The Module Library for integrating devices. +- [Server](./src/madsci.server/README.md): The REST API Server. +- [CLI](./src/madsci.cli/README.md): The Command Line Interface. +- [Dashboard](./src/madsci.dashboard/README.md): The web-based Dashboard and management interface. 
+- [Resources](./src/madsci.resources/README.md): The Resource Library for managing resources. +- [Events](./src/madsci.events/README.md): The Event Library for managing events. diff --git a/madsci/madsci_client/madsci/client/__init__.py b/madsci/madsci_client/madsci/client/__init__.py index 7c0673d..da93754 100644 --- a/madsci/madsci_client/madsci/client/__init__.py +++ b/madsci/madsci_client/madsci/client/__init__.py @@ -1 +1 @@ -"""The Modular Autonomous Discovery for Science (MADSci) Python Client and CLI.""" +"""The Modular Autonomous Discovery for Science (MADSci) Python Client and CLI.""" diff --git a/madsci/madsci_client/madsci/client/cli/__init__.py b/madsci/madsci_client/madsci/client/cli/__init__.py index 22c2084..52bd1cf 100644 --- a/madsci/madsci_client/madsci/client/cli/__init__.py +++ b/madsci/madsci_client/madsci/client/cli/__init__.py @@ -1,41 +1,41 @@ -"""Command Line Interface for the MADSci client.""" - -import click -from rich.console import Console -from trogon import tui - -from madsci.client.cli.lab_cli import lab -from madsci.client.cli.module_cli import module -from madsci.client.cli.node_cli import node -from madsci.client.cli.resources_cli import resource -from madsci.client.cli.workcell_cli import workcell - -console = Console() - - -@tui() -@click.group() -@click.option( - "--quiet", - "-q", - is_flag=True, - help="Run in quiet mode, skipping prompts.", -) -def root_cli(quiet: bool = False) -> None: - """MADSci command line interface.""" - - -@root_cli.command() -def version() -> None: - """Display the MADSci client version.""" - console.print("MADSci Client v0.1.0") - - -root_cli.add_command(lab) -root_cli.add_command(workcell) -root_cli.add_command(module) -root_cli.add_command(node) -root_cli.add_command(resource) - -if __name__ == "__main__": - tui(root_cli, auto_envvar_prefix="MADSCI_CLI_") +"""Command Line Interface for the MADSci client.""" + +import click +from rich.console import Console +from trogon import tui + +from 
madsci.client.cli.lab_cli import lab +from madsci.client.cli.module_cli import module +from madsci.client.cli.node_cli import node +from madsci.client.cli.resources_cli import resource +from madsci.client.cli.workcell_cli import workcell + +console = Console() + + +@tui() +@click.group() +@click.option( + "--quiet", + "-q", + is_flag=True, + help="Run in quiet mode, skipping prompts.", +) +def root_cli(quiet: bool = False) -> None: + """MADSci command line interface.""" + + +@root_cli.command() +def version() -> None: + """Display the MADSci client version.""" + console.print("MADSci Client v0.1.0") + + +root_cli.add_command(lab) +root_cli.add_command(workcell) +root_cli.add_command(module) +root_cli.add_command(node) +root_cli.add_command(resource) + +if __name__ == "__main__": + tui(root_cli, auto_envvar_prefix="MADSCI_CLI_") diff --git a/madsci/madsci_client/madsci/client/cli/lab_cli.py b/madsci/madsci_client/madsci/client/cli/lab_cli.py index a9e8bf1..c365d78 100644 --- a/madsci/madsci_client/madsci/client/cli/lab_cli.py +++ b/madsci/madsci_client/madsci/client/cli/lab_cli.py @@ -1,265 +1,265 @@ -"""Command Line Interface for managing MADSci Squid labs.""" - -import os -from pathlib import Path -from typing import Optional - -import click -from click.core import Context -from rich import print -from rich.console import Console -from rich.pretty import pprint - -from madsci.common.types.squid_types import LabDefinition -from madsci.common.utils import ( - prompt_for_input, - prompt_yes_no, - save_model, - search_for_file_pattern, - to_snake_case, -) - -console = Console() - - -class LabContext: - """Context object for lab commands.""" - - def __init__(self) -> None: - """Initialize the context object.""" - self.lab_def: Optional[LabDefinition] = None - self.path: Optional[Path] = None - self.quiet: bool = False - - -pass_lab = click.make_pass_decorator(LabContext) - - -def find_lab(name: Optional[str], path: Optional[str]) -> LabContext: - """Find a lab by name 
or path.""" - lab_context = LabContext() - - if path: - lab_context.path = Path(path) - if lab_context.path.exists(): - lab_context.lab_def = LabDefinition.from_yaml(path) - return lab_context - - if name: - lab_files = search_for_file_pattern("*.lab.yaml") - for lab_file in lab_files: - lab_def = LabDefinition.from_yaml(lab_file) - if lab_def.name == name: - lab_context.path = Path(lab_file) - lab_context.lab_def = lab_def - return lab_context - - # * Search for any lab file - lab_files = search_for_file_pattern("*.lab.yaml") - if lab_files: - lab_context.path = Path(lab_files[0]) - lab_context.lab_def = LabDefinition.from_yaml(lab_files[0]) - - return lab_context - - -@click.group() -@click.option("--name", "-n", type=str, help="The name of the lab to operate on.") -@click.option("--path", "-p", type=str, help="The path to the lab definition file.") -@click.pass_context -def lab(ctx: Context, name: Optional[str], path: Optional[str]) -> None: - """Manage labs.""" - ctx.obj = find_lab(name, path) - ctx.obj.quiet = ctx.parent.params.get("quiet") - - -@lab.command() -@click.option("--name", "-n", type=str, help="The name of the lab.", required=False) -@click.option("--path", "-p", type=str, help="The path to the lab definition file.") -@click.option("--description", "-d", type=str, help="The description of the lab.") -@click.pass_context -def create( - ctx: Context, - name: Optional[str], - path: Optional[str], - description: Optional[str], -) -> None: - """Create a new lab.""" - if not name: - name = ctx.parent.params.get("name") - if not name: - name = prompt_for_input("Lab Name", required=True, quiet=ctx.obj.quiet) - if not description: - description = prompt_for_input("Lab Description", quiet=ctx.obj.quiet) - - lab_definition = LabDefinition(name=name, description=description) - console.print(lab_definition) - - if not path: - path = ctx.parent.params.get("path") - if not path: - default_path = Path.cwd() / f"{to_snake_case(name)}.lab.yaml" - new_path = 
prompt_for_input( - "Path to save Lab Definition file", - default=str(default_path), - quiet=ctx.obj.quiet, - ) - if new_path: - path = Path(new_path) - print("Path:", path) - save_model(path=path, model=lab_definition, overwrite_check=not ctx.obj.quiet) - - -@lab.command() -def list() -> None: - """List all labs. Will list all labs in the current directory, subdirectories, and parent directories.""" - lab_files = search_for_file_pattern("*.lab.yaml") - - if lab_files: - for lab_file in sorted(set(lab_files)): - lab_definition = LabDefinition.from_yaml(lab_file) - console.print( - f"[bold]{lab_definition.name}[/]: {lab_definition.description} ({lab_file})", - ) - if lab_definition.workcells: - console.print(" Workcells:") - for name, workcell in lab_definition.workcells.items(): - if isinstance(workcell, str): - console.print(f" - {name}: {workcell}") - else: - console.print(f" - {name}: {workcell.description}") - else: - print("No lab definitions found") - - -@lab.command() -@pass_lab -def info(ctx: LabContext) -> None: - """Get information about a lab.""" - if ctx.lab_def: - pprint(ctx.lab_def) - else: - console.print( - "No lab found. Specify lab by name or path. If you don't have a lab file, you can create one with 'madsci lab create'.", - ) - - -@lab.command() -@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.") -@pass_lab -def delete(ctx: LabContext, yes: bool) -> None: - """Delete a lab.""" - if ctx.lab_def and ctx.path: - console.print(f"Deleting lab: {ctx.lab_def.name} ({ctx.path})") - if yes or ctx.quiet or prompt_yes_no("Are you sure?"): - ctx.path.unlink() - console.print(f"Deleted {ctx.path}") - else: - console.print( - "No lab found. Specify lab by name or path. 
If you don't have a lab file, you can create one with 'madsci lab create'.", - ) - - -@lab.command() -@pass_lab -def validate(ctx: LabContext) -> None: - """Validate a lab definition file.""" - if ctx.lab_def: - console.print(ctx.lab_def) - else: - console.print( - "No lab found. Specify lab by name or path. If you don't have a lab definition file, you can create one with 'madsci lab create'.", - ) - - -def run_command(command: str, lab: LabDefinition, path: Path) -> None: - """Run a command in a lab.""" - console.print( - f"Running command: [bold]{command}[/] ({lab.commands[command]}) in lab: [bold]{lab.name}[/] ({path})", - ) - print(os.popen(lab.commands[command]).read()) # noqa: S605 - - -@lab.command() -@click.argument("command", type=str) -@pass_lab -def run(ctx: LabContext, command: str) -> None: - """Run a command in a lab.""" - if not ctx.lab_def: - console.print( - "No lab found. Specify lab by name or path. If you don't have a lab file, you can create one with 'madsci lab create'.", - ) - return - - if ctx.lab_def.commands.get(command): - run_command(command, ctx.lab_def, ctx.path) - else: - console.print( - f"Command [bold]{command}[/] not found in lab definition: [bold]{ctx.lab_def.name}[/] ({ctx.path})", - ) - - -@lab.command() -@click.option("--command_name", "--name", "-n", type=str, required=False) -@click.option("--command", "-c", type=str, required=False) -@pass_lab -def add_command(ctx: LabContext, command_name: str, command: str) -> None: - """Add a command to a lab definition.""" - if not ctx.lab_def: - console.print( - "No lab found. Specify lab by name or path. 
If you don't have a lab file, you can create one with 'madsci lab create'.", - ) - return - - if not command_name: - command_name = prompt_for_input("Command Name", required=True) - if not command: - command = prompt_for_input("Command", required=True) - - if command_name in ctx.lab_def.commands: - console.print( - f"Command [bold]{command_name}[/] already exists in lab definition: [bold]{ctx.lab_def.name}[/] ({ctx.path})", - ) - if not prompt_yes_no("Do you want to overwrite it?", default="no"): - return - - ctx.lab_def.commands[command_name] = command - save_model(ctx.path, ctx.lab_def, overwrite_check=False) - console.print( - f"Added command [bold]{command_name}[/] to lab: [bold]{ctx.lab_def.name}[/]", - ) - - -@lab.command() -@click.argument("command_name", type=str, required=False) -@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.") -@pass_lab -def delete_command(ctx: LabContext, command_name: str, yes: bool) -> None: - """Delete a command from a lab definition.""" - if not ctx.lab_def: - console.print( - "No lab found. Specify lab by name or path. 
If you don't have a lab file, you can create one with 'madsci lab create'.", - ) - return - - if not command_name: - command_name = prompt_for_input("Command Name", required=True) - - if command_name in ctx.lab_def.commands: - if ( - yes - or ctx.quiet - or prompt_yes_no( - f"Are you sure you want to delete command [bold]{command_name}[/]?", - default="no", - ) - ): - del ctx.lab_def.commands[command_name] - save_model(ctx.path, ctx.lab_def, overwrite_check=False) - console.print( - f"Deleted command [bold]{command_name}[/] from lab: [bold]{ctx.lab_def.name}[/]", - ) - else: - console.print( - f"Command [bold]{command_name}[/] not found in lab definition: [bold]{ctx.lab_def.name}[/] ({ctx.path})", - ) +"""Command Line Interface for managing MADSci Squid labs.""" + +import os +from pathlib import Path +from typing import Optional + +import click +from click.core import Context +from rich import print +from rich.console import Console +from rich.pretty import pprint + +from madsci.common.types.squid_types import LabDefinition +from madsci.common.utils import ( + prompt_for_input, + prompt_yes_no, + save_model, + search_for_file_pattern, + to_snake_case, +) + +console = Console() + + +class LabContext: + """Context object for lab commands.""" + + def __init__(self) -> None: + """Initialize the context object.""" + self.lab_def: Optional[LabDefinition] = None + self.path: Optional[Path] = None + self.quiet: bool = False + + +pass_lab = click.make_pass_decorator(LabContext) + + +def find_lab(name: Optional[str], path: Optional[str]) -> LabContext: + """Find a lab by name or path.""" + lab_context = LabContext() + + if path: + lab_context.path = Path(path) + if lab_context.path.exists(): + lab_context.lab_def = LabDefinition.from_yaml(path) + return lab_context + + if name: + lab_files = search_for_file_pattern("*.lab.yaml") + for lab_file in lab_files: + lab_def = LabDefinition.from_yaml(lab_file) + if lab_def.name == name: + lab_context.path = Path(lab_file) + 
lab_context.lab_def = lab_def + return lab_context + + # * Search for any lab file + lab_files = search_for_file_pattern("*.lab.yaml") + if lab_files: + lab_context.path = Path(lab_files[0]) + lab_context.lab_def = LabDefinition.from_yaml(lab_files[0]) + + return lab_context + + +@click.group() +@click.option("--name", "-n", type=str, help="The name of the lab to operate on.") +@click.option("--path", "-p", type=str, help="The path to the lab definition file.") +@click.pass_context +def lab(ctx: Context, name: Optional[str], path: Optional[str]) -> None: + """Manage labs.""" + ctx.obj = find_lab(name, path) + ctx.obj.quiet = ctx.parent.params.get("quiet") + + +@lab.command() +@click.option("--name", "-n", type=str, help="The name of the lab.", required=False) +@click.option("--path", "-p", type=str, help="The path to the lab definition file.") +@click.option("--description", "-d", type=str, help="The description of the lab.") +@click.pass_context +def create( + ctx: Context, + name: Optional[str], + path: Optional[str], + description: Optional[str], +) -> None: + """Create a new lab.""" + if not name: + name = ctx.parent.params.get("name") + if not name: + name = prompt_for_input("Lab Name", required=True, quiet=ctx.obj.quiet) + if not description: + description = prompt_for_input("Lab Description", quiet=ctx.obj.quiet) + + lab_definition = LabDefinition(name=name, description=description) + console.print(lab_definition) + + if not path: + path = ctx.parent.params.get("path") + if not path: + default_path = Path.cwd() / f"{to_snake_case(name)}.lab.yaml" + new_path = prompt_for_input( + "Path to save Lab Definition file", + default=str(default_path), + quiet=ctx.obj.quiet, + ) + if new_path: + path = Path(new_path) + print("Path:", path) + save_model(path=path, model=lab_definition, overwrite_check=not ctx.obj.quiet) + + +@lab.command() +def list() -> None: + """List all labs. 
Will list all labs in the current directory, subdirectories, and parent directories.""" + lab_files = search_for_file_pattern("*.lab.yaml") + + if lab_files: + for lab_file in sorted(set(lab_files)): + lab_definition = LabDefinition.from_yaml(lab_file) + console.print( + f"[bold]{lab_definition.name}[/]: {lab_definition.description} ({lab_file})", + ) + if lab_definition.workcells: + console.print(" Workcells:") + for name, workcell in lab_definition.workcells.items(): + if isinstance(workcell, str): + console.print(f" - {name}: {workcell}") + else: + console.print(f" - {name}: {workcell.description}") + else: + print("No lab definitions found") + + +@lab.command() +@pass_lab +def info(ctx: LabContext) -> None: + """Get information about a lab.""" + if ctx.lab_def: + pprint(ctx.lab_def) + else: + console.print( + "No lab found. Specify lab by name or path. If you don't have a lab file, you can create one with 'madsci lab create'.", + ) + + +@lab.command() +@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.") +@pass_lab +def delete(ctx: LabContext, yes: bool) -> None: + """Delete a lab.""" + if ctx.lab_def and ctx.path: + console.print(f"Deleting lab: {ctx.lab_def.name} ({ctx.path})") + if yes or ctx.quiet or prompt_yes_no("Are you sure?"): + ctx.path.unlink() + console.print(f"Deleted {ctx.path}") + else: + console.print( + "No lab found. Specify lab by name or path. If you don't have a lab file, you can create one with 'madsci lab create'.", + ) + + +@lab.command() +@pass_lab +def validate(ctx: LabContext) -> None: + """Validate a lab definition file.""" + if ctx.lab_def: + console.print(ctx.lab_def) + else: + console.print( + "No lab found. Specify lab by name or path. 
If you don't have a lab definition file, you can create one with 'madsci lab create'.", + ) + + +def run_command(command: str, lab: LabDefinition, path: Path) -> None: + """Run a command in a lab.""" + console.print( + f"Running command: [bold]{command}[/] ({lab.commands[command]}) in lab: [bold]{lab.name}[/] ({path})", + ) + print(os.popen(lab.commands[command]).read()) # noqa: S605 + + +@lab.command() +@click.argument("command", type=str) +@pass_lab +def run(ctx: LabContext, command: str) -> None: + """Run a command in a lab.""" + if not ctx.lab_def: + console.print( + "No lab found. Specify lab by name or path. If you don't have a lab file, you can create one with 'madsci lab create'.", + ) + return + + if ctx.lab_def.commands.get(command): + run_command(command, ctx.lab_def, ctx.path) + else: + console.print( + f"Command [bold]{command}[/] not found in lab definition: [bold]{ctx.lab_def.name}[/] ({ctx.path})", + ) + + +@lab.command() +@click.option("--command_name", "--name", "-n", type=str, required=False) +@click.option("--command", "-c", type=str, required=False) +@pass_lab +def add_command(ctx: LabContext, command_name: str, command: str) -> None: + """Add a command to a lab definition.""" + if not ctx.lab_def: + console.print( + "No lab found. Specify lab by name or path. 
If you don't have a lab file, you can create one with 'madsci lab create'.", + ) + return + + if not command_name: + command_name = prompt_for_input("Command Name", required=True) + if not command: + command = prompt_for_input("Command", required=True) + + if command_name in ctx.lab_def.commands: + console.print( + f"Command [bold]{command_name}[/] already exists in lab definition: [bold]{ctx.lab_def.name}[/] ({ctx.path})", + ) + if not prompt_yes_no("Do you want to overwrite it?", default="no"): + return + + ctx.lab_def.commands[command_name] = command + save_model(ctx.path, ctx.lab_def, overwrite_check=False) + console.print( + f"Added command [bold]{command_name}[/] to lab: [bold]{ctx.lab_def.name}[/]", + ) + + +@lab.command() +@click.argument("command_name", type=str, required=False) +@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.") +@pass_lab +def delete_command(ctx: LabContext, command_name: str, yes: bool) -> None: + """Delete a command from a lab definition.""" + if not ctx.lab_def: + console.print( + "No lab found. Specify lab by name or path. 
If you don't have a lab file, you can create one with 'madsci lab create'.", + ) + return + + if not command_name: + command_name = prompt_for_input("Command Name", required=True) + + if command_name in ctx.lab_def.commands: + if ( + yes + or ctx.quiet + or prompt_yes_no( + f"Are you sure you want to delete command [bold]{command_name}[/]?", + default="no", + ) + ): + del ctx.lab_def.commands[command_name] + save_model(ctx.path, ctx.lab_def, overwrite_check=False) + console.print( + f"Deleted command [bold]{command_name}[/] from lab: [bold]{ctx.lab_def.name}[/]", + ) + else: + console.print( + f"Command [bold]{command_name}[/] not found in lab definition: [bold]{ctx.lab_def.name}[/] ({ctx.path})", + ) diff --git a/madsci/madsci_client/madsci/client/cli/module_cli.py b/madsci/madsci_client/madsci/client/cli/module_cli.py index 295edce..6149bd9 100644 --- a/madsci/madsci_client/madsci/client/cli/module_cli.py +++ b/madsci/madsci_client/madsci/client/cli/module_cli.py @@ -1,282 +1,282 @@ -"""Command Line Interface for managing MADSci Modules.""" - -from pathlib import Path -from typing import Optional - -import click -from click.core import Context -from rich.console import Console -from rich.pretty import pprint - -from madsci.common.types.module_types import ( - NODE_MODULE_CONFIG_TEMPLATES, - NodeModuleDefinition, - NodeType, -) -from madsci.common.utils import ( - prompt_for_input, - prompt_from_list, - prompt_yes_no, - save_model, - search_for_file_pattern, - to_snake_case, -) - -console = Console() - - -class ModuleContext: - """Context object for module commands.""" - - def __init__(self) -> None: - """Initialize the context object.""" - self.module: Optional[NodeModuleDefinition] = None - self.path: Optional[Path] = None - self.quiet: bool = False - - -pass_module = click.make_pass_decorator(ModuleContext) - - -def find_module(name: Optional[str], path: Optional[str]) -> ModuleContext: - """Find a module by name or path.""" - module_context = ModuleContext() - 
- if path: - module_context.path = Path(path) - if module_context.path.exists(): - module_context.module = NodeModuleDefinition.from_yaml(path) - return module_context - - module_files = search_for_file_pattern("*.module.yaml") - for module_file in module_files: - module_def = NodeModuleDefinition.from_yaml(module_file) - if not name or module_def.module_name == name: - module_context.path = Path(module_file) - module_context.module = module_def - return module_context - - return module_context - - -@click.group() -@click.option("--name", "-n", type=str, help="Name of the module.") -@click.option("--path", "-p", type=str, help="Path to the module definition file.") -@click.pass_context -def module(ctx: Context, name: Optional[str], path: Optional[str]) -> None: - """Manage modules.""" - ctx.obj = find_module(name, path) - ctx.obj.quiet = ctx.parent.params.get("quiet") - - -@module.command() -@click.option("--name", "-n", type=str, help="The name of the module.") -@click.option("--path", "-p", type=str, help="The path to the module definition file.") -@click.option("--description", "-d", type=str, help="The description of the module.") -@click.option("--module_type", "-t", type=str, help="The type of the module.") -@click.option( - "--config_template", - "-c", - type=str, - help="The template of the module configuration to use.", -) -@click.pass_context -def create( # noqa: PLR0913 - ctx: Context, - name: Optional[str], - path: Optional[str], - description: Optional[str], - module_type: Optional[str], - config_template: Optional[str], -) -> None: - """Create a new module.""" - name = name if name else ctx.parent.params.get("name") - name = ( - name - if name - else prompt_for_input("Module Name", required=True, quiet=ctx.obj.quiet) - ) - description = ( - description - if description - else prompt_for_input("Module Description", quiet=ctx.obj.quiet) - ) - if not module_type or module_type not in [ - module_type.value for module_type in NodeType - ]: - module_type = 
prompt_from_list( - "Module Type", - [module_type.value for module_type in NodeType], - default=NodeType.DEVICE.value, - quiet=ctx.obj.quiet, - ) - config_keys = [] - for key in NODE_MODULE_CONFIG_TEMPLATES: - config_keys.append(key) - if not config_template or config_template not in config_keys: - if prompt_yes_no( - "Do you want to use a configuration template to add configuration options to your module?", - default="no", - quiet=ctx.obj.quiet, - ): - template_name = prompt_from_list( - "Module Configuration Template", - config_keys, - default=config_keys[0], - ) - config_template = NODE_MODULE_CONFIG_TEMPLATES[template_name] - else: - config_template = [] - else: - config_template = NODE_MODULE_CONFIG_TEMPLATES[config_template] - - module_definition = NodeModuleDefinition( - module_name=name, - module_description=description, - module_type=module_type, - config=config_template, - ) - console.print(module_definition) - - if not path: - path = ctx.parent.params.get("path") - if not path: - default_path = Path.cwd() / f"{to_snake_case(name)}.module.yaml" - new_path = prompt_for_input( - "Path to save Module Definition file", - default=str(default_path), - quiet=ctx.obj.quiet, - ) - if new_path: - path = Path(new_path) - save_model(path=path, model=module_definition, overwrite_check=not ctx.obj.quiet) - - console.print() - console.print( - f"Created module definition: [bold]{module_definition.module_name}[/] ({path}). 
Next, you can define your module and add commands to control it with 'madsci module add-command'.", - ) - console.print( - "[red]Note:[/] You need to define a node before you can use this module, see 'madsci node create'.", - ) - - -@module.command() -def list() -> None: - """List all modules.""" - module_files = search_for_file_pattern("*.module.yaml") - - if module_files: - for module_file in sorted(set(module_files)): - module_definition = NodeModuleDefinition.from_yaml(module_file) - console.print( - f"[bold]{module_definition.module_name}[/]: {module_definition.module_description} ({module_file})", - ) - else: - console.print("No module definitions found") - - -@module.command() -@pass_module -def info(ctx: ModuleContext) -> None: - """Get information about a module.""" - if ctx.module: - pprint(ctx.module) - else: - console.print( - "No module found. Specify module by name or path. If you don't have a module file, you can create one with 'madsci module create'.", - ) - - -@module.command() -@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.") -@pass_module -def delete(ctx: ModuleContext, yes: bool) -> None: - """Delete a module.""" - if ctx.module and ctx.path: - console.print(f"Deleting module: {ctx.module.module_name} ({ctx.path})") - if yes or prompt_yes_no("Are you sure?", quiet=ctx.quiet): - ctx.path.unlink() - console.print(f"Deleted {ctx.path}") - else: - console.print( - "No module found. Specify module by name or path. If you don't have a module file, you can create one with 'madsci module create'.", - ) - - -@module.command() -@pass_module -def validate(ctx: ModuleContext) -> None: - """Validate a module definition file.""" - if ctx.module: - console.print(ctx.module) - else: - console.print( - "No module found. Specify module by name or path. 
If you don't have a module definition file, you can create one with 'madsci module create'.", - ) - - -@module.command() -@click.option("--command_name", "--name", "-n", type=str, required=False) -@click.option("--command", "-c", type=str, required=False) -@pass_module -def add_command(ctx: ModuleContext, command_name: str, command: str) -> None: - """Add a command to a module definition.""" - if not ctx.module: - console.print( - "No module found. Specify module by name or path. If you don't have a module file, you can create one with 'madsci module create'.", - ) - return - - if not command_name: - command_name = prompt_for_input("Command Name", required=True) - if not command: - command = prompt_for_input("Command", required=True) - - if ctx.module.commands is None: - ctx.module.commands = {} - - if command_name in ctx.module.commands: - console.print( - f"Command [bold]{command_name}[/] already exists in module definition: [bold]{ctx.module.module_name}[/] ({ctx.path})", - ) - if not prompt_yes_no("Do you want to overwrite it?", default="no"): - return - - ctx.module.commands[command_name] = command - save_model(ctx.path, ctx.module, overwrite_check=False) - console.print( - f"Added command [bold]{command_name}[/] to module: [bold]{ctx.module.module_name}[/]", - ) - - -@module.command() -@click.option("--command_name", "--name", "-n", type=str, required=False) -@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.") -@pass_module -def delete_command(ctx: ModuleContext, command_name: str, yes: bool) -> None: - """Delete a command from a module definition.""" - quiet = ctx.quiet or yes - if not ctx.module: - console.print( - "No module found. Specify module by name or path. 
If you don't have a module file, you can create one with 'madsci module create'.", - ) - return - - if not command_name: - command_name = prompt_for_input("Command Name", required=True, quiet=quiet) - - if ctx.module.commands and command_name in ctx.module.commands: - if yes or prompt_yes_no( - f"Are you sure you want to delete command [bold]{command_name}[/]?", - default="no", - quiet=quiet, - ): - del ctx.module.commands[command_name] - save_model(ctx.path, ctx.module, overwrite_check=False) - console.print( - f"Deleted command [bold]{command_name}[/] from module: [bold]{ctx.module.module_name}[/]", - ) - else: - console.print( - f"Command [bold]{command_name}[/] not found in module definition: [bold]{ctx.module.module_name}[/] ({ctx.path})", - ) +"""Command Line Interface for managing MADSci Modules.""" + +from pathlib import Path +from typing import Optional + +import click +from click.core import Context +from rich.console import Console +from rich.pretty import pprint + +from madsci.common.types.module_types import ( + NODE_MODULE_CONFIG_TEMPLATES, + NodeModuleDefinition, + NodeType, +) +from madsci.common.utils import ( + prompt_for_input, + prompt_from_list, + prompt_yes_no, + save_model, + search_for_file_pattern, + to_snake_case, +) + +console = Console() + + +class ModuleContext: + """Context object for module commands.""" + + def __init__(self) -> None: + """Initialize the context object.""" + self.module: Optional[NodeModuleDefinition] = None + self.path: Optional[Path] = None + self.quiet: bool = False + + +pass_module = click.make_pass_decorator(ModuleContext) + + +def find_module(name: Optional[str], path: Optional[str]) -> ModuleContext: + """Find a module by name or path.""" + module_context = ModuleContext() + + if path: + module_context.path = Path(path) + if module_context.path.exists(): + module_context.module = NodeModuleDefinition.from_yaml(path) + return module_context + + module_files = search_for_file_pattern("*.module.yaml") + for 
module_file in module_files: + module_def = NodeModuleDefinition.from_yaml(module_file) + if not name or module_def.module_name == name: + module_context.path = Path(module_file) + module_context.module = module_def + return module_context + + return module_context + + +@click.group() +@click.option("--name", "-n", type=str, help="Name of the module.") +@click.option("--path", "-p", type=str, help="Path to the module definition file.") +@click.pass_context +def module(ctx: Context, name: Optional[str], path: Optional[str]) -> None: + """Manage modules.""" + ctx.obj = find_module(name, path) + ctx.obj.quiet = ctx.parent.params.get("quiet") + + +@module.command() +@click.option("--name", "-n", type=str, help="The name of the module.") +@click.option("--path", "-p", type=str, help="The path to the module definition file.") +@click.option("--description", "-d", type=str, help="The description of the module.") +@click.option("--module_type", "-t", type=str, help="The type of the module.") +@click.option( + "--config_template", + "-c", + type=str, + help="The template of the module configuration to use.", +) +@click.pass_context +def create( # noqa: PLR0913 + ctx: Context, + name: Optional[str], + path: Optional[str], + description: Optional[str], + module_type: Optional[str], + config_template: Optional[str], +) -> None: + """Create a new module.""" + name = name if name else ctx.parent.params.get("name") + name = ( + name + if name + else prompt_for_input("Module Name", required=True, quiet=ctx.obj.quiet) + ) + description = ( + description + if description + else prompt_for_input("Module Description", quiet=ctx.obj.quiet) + ) + if not module_type or module_type not in [ + module_type.value for module_type in NodeType + ]: + module_type = prompt_from_list( + "Module Type", + [module_type.value for module_type in NodeType], + default=NodeType.DEVICE.value, + quiet=ctx.obj.quiet, + ) + config_keys = [] + for key in NODE_MODULE_CONFIG_TEMPLATES: + config_keys.append(key) 
+ if not config_template or config_template not in config_keys: + if prompt_yes_no( + "Do you want to use a configuration template to add configuration options to your module?", + default="no", + quiet=ctx.obj.quiet, + ): + template_name = prompt_from_list( + "Module Configuration Template", + config_keys, + default=config_keys[0], + ) + config_template = NODE_MODULE_CONFIG_TEMPLATES[template_name] + else: + config_template = [] + else: + config_template = NODE_MODULE_CONFIG_TEMPLATES[config_template] + + module_definition = NodeModuleDefinition( + module_name=name, + module_description=description, + module_type=module_type, + config=config_template, + ) + console.print(module_definition) + + if not path: + path = ctx.parent.params.get("path") + if not path: + default_path = Path.cwd() / f"{to_snake_case(name)}.module.yaml" + new_path = prompt_for_input( + "Path to save Module Definition file", + default=str(default_path), + quiet=ctx.obj.quiet, + ) + if new_path: + path = Path(new_path) + save_model(path=path, model=module_definition, overwrite_check=not ctx.obj.quiet) + + console.print() + console.print( + f"Created module definition: [bold]{module_definition.module_name}[/] ({path}). 
Next, you can define your module and add commands to control it with 'madsci module add-command'.", + ) + console.print( + "[red]Note:[/] You need to define a node before you can use this module, see 'madsci node create'.", + ) + + +@module.command() +def list() -> None: + """List all modules.""" + module_files = search_for_file_pattern("*.module.yaml") + + if module_files: + for module_file in sorted(set(module_files)): + module_definition = NodeModuleDefinition.from_yaml(module_file) + console.print( + f"[bold]{module_definition.module_name}[/]: {module_definition.module_description} ({module_file})", + ) + else: + console.print("No module definitions found") + + +@module.command() +@pass_module +def info(ctx: ModuleContext) -> None: + """Get information about a module.""" + if ctx.module: + pprint(ctx.module) + else: + console.print( + "No module found. Specify module by name or path. If you don't have a module file, you can create one with 'madsci module create'.", + ) + + +@module.command() +@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.") +@pass_module +def delete(ctx: ModuleContext, yes: bool) -> None: + """Delete a module.""" + if ctx.module and ctx.path: + console.print(f"Deleting module: {ctx.module.module_name} ({ctx.path})") + if yes or prompt_yes_no("Are you sure?", quiet=ctx.quiet): + ctx.path.unlink() + console.print(f"Deleted {ctx.path}") + else: + console.print( + "No module found. Specify module by name or path. If you don't have a module file, you can create one with 'madsci module create'.", + ) + + +@module.command() +@pass_module +def validate(ctx: ModuleContext) -> None: + """Validate a module definition file.""" + if ctx.module: + console.print(ctx.module) + else: + console.print( + "No module found. Specify module by name or path. 
If you don't have a module definition file, you can create one with 'madsci module create'.", + ) + + +@module.command() +@click.option("--command_name", "--name", "-n", type=str, required=False) +@click.option("--command", "-c", type=str, required=False) +@pass_module +def add_command(ctx: ModuleContext, command_name: str, command: str) -> None: + """Add a command to a module definition.""" + if not ctx.module: + console.print( + "No module found. Specify module by name or path. If you don't have a module file, you can create one with 'madsci module create'.", + ) + return + + if not command_name: + command_name = prompt_for_input("Command Name", required=True) + if not command: + command = prompt_for_input("Command", required=True) + + if ctx.module.commands is None: + ctx.module.commands = {} + + if command_name in ctx.module.commands: + console.print( + f"Command [bold]{command_name}[/] already exists in module definition: [bold]{ctx.module.module_name}[/] ({ctx.path})", + ) + if not prompt_yes_no("Do you want to overwrite it?", default="no"): + return + + ctx.module.commands[command_name] = command + save_model(ctx.path, ctx.module, overwrite_check=False) + console.print( + f"Added command [bold]{command_name}[/] to module: [bold]{ctx.module.module_name}[/]", + ) + + +@module.command() +@click.option("--command_name", "--name", "-n", type=str, required=False) +@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.") +@pass_module +def delete_command(ctx: ModuleContext, command_name: str, yes: bool) -> None: + """Delete a command from a module definition.""" + quiet = ctx.quiet or yes + if not ctx.module: + console.print( + "No module found. Specify module by name or path. 
If you don't have a module file, you can create one with 'madsci module create'.", + ) + return + + if not command_name: + command_name = prompt_for_input("Command Name", required=True, quiet=quiet) + + if ctx.module.commands and command_name in ctx.module.commands: + if yes or prompt_yes_no( + f"Are you sure you want to delete command [bold]{command_name}[/]?", + default="no", + quiet=quiet, + ): + del ctx.module.commands[command_name] + save_model(ctx.path, ctx.module, overwrite_check=False) + console.print( + f"Deleted command [bold]{command_name}[/] from module: [bold]{ctx.module.module_name}[/]", + ) + else: + console.print( + f"Command [bold]{command_name}[/] not found in module definition: [bold]{ctx.module.module_name}[/] ({ctx.path})", + ) diff --git a/madsci/madsci_client/madsci/client/cli/node_cli.py b/madsci/madsci_client/madsci/client/cli/node_cli.py index ac0aa08..4fe13a9 100644 --- a/madsci/madsci_client/madsci/client/cli/node_cli.py +++ b/madsci/madsci_client/madsci/client/cli/node_cli.py @@ -1,447 +1,447 @@ -"""Command Line Interface for managing MADSci Nodes.""" - -import os -from pathlib import Path -from typing import Optional - -import click -from click.core import Context -from rich.console import Console -from rich.pretty import pprint - -from madsci.common.types.module_types import NodeModuleDefinition -from madsci.common.types.node_types import NodeDefinition -from madsci.common.types.workcell_types import WorkcellDefinition -from madsci.common.utils import ( - PathLike, - prompt_for_input, - prompt_from_list, - prompt_yes_no, - relative_path, - save_model, - search_for_file_pattern, - to_snake_case, -) - -console = Console() - - -class NodeContext: - """Context object for node commands.""" - - def __init__(self) -> None: - """Initialize the context object.""" - self.node_def: Optional[NodeDefinition] = None - self.path: Optional[Path] = None - self.workcell_def: Optional[WorkcellDefinition] = None - self.quiet: bool = False - - -pass_node 
= click.make_pass_decorator(NodeContext) - - -def find_node(name: Optional[str], path: Optional[str]) -> NodeContext: - """Find a node by name or path, including within workcell files.""" - node_context = NodeContext() - - if path: - node_context.path = Path(path) - if node_context.path.exists(): - if path.endswith(".node.yaml"): - node_context.node_def = NodeDefinition.from_yaml(path) - elif path.endswith(".workcell.yaml"): - node_context.workcell_def = WorkcellDefinition.from_yaml(path) - node_context.node_def = find_node_in_workcell( - name, - node_context.workcell_def, - ) - return node_context - - node_files = search_for_file_pattern("*.node.yaml") - for node_file in node_files: - node_def = NodeDefinition.from_yaml(node_file) - if not name or node_def.node_name == name: - node_context.path = Path(node_file) - node_context.node_def = node_def - return node_context - - workcell_files = search_for_file_pattern("*.workcell.yaml") - for workcell_file in workcell_files: - workcell_def = WorkcellDefinition.from_yaml(workcell_file) - node_def = find_node_in_workcell(name, workcell_def) - if node_def: - node_context.path = Path(workcell_file) - node_context.workcell_def = workcell_def - node_context.node_def = node_def - return node_context - - return node_context - - -def find_node_in_workcell( - name: Optional[str], - workcell_def: WorkcellDefinition, -) -> Optional[NodeDefinition]: - """Find a node definition within a workcell.""" - for node in workcell_def.nodes: - if isinstance(node, NodeDefinition) and (not name or node.node_name == name): - return node - if isinstance(node, str) and node.endswith(".node.yaml"): - node_def = NodeDefinition.from_yaml(node) - if not name or node_def.node_name == name: - return node_def - return None - - -@click.group() -@click.option("--name", "-n", type=str, help="The name of the node to operate on.") -@click.option( - "--path", - "-p", - type=str, - help="The path to the node or workcell definition file.", -) 
-@click.pass_context -def node(ctx: Context, name: Optional[str], path: Optional[str]) -> None: - """Manage nodes.""" - ctx.obj = find_node(name, path) - ctx.obj.quiet = ctx.parent.params.get("quiet") - - -@node.command() -@click.option("--name", "-n", type=str, help="The name of the node.", required=False) -@click.option( - "--path", - "-p", - type=str, - help="The path to save the node definition file.", -) -@click.option("--description", "-d", type=str, help="The description of the node.") -@click.option( - "--module_name", - "-m", - type=str, - help="The name of the module to use for the node.", -) -@click.option( - "--module_path", - "-m", - type=str, - help="Path to the module definition file to use for the node.", -) -@click.option( - "--standalone", - "-s", - is_flag=True, - help="Don't add node to any workcell.", -) -@click.pass_context -def create( # noqa: PLR0913 - ctx: Context, - name: Optional[str], - path: Optional[str], - description: Optional[str], - module_name: Optional[str], - module_path: Optional[str], - standalone: bool, -) -> None: - """Create a new node.""" - commands = {} - name = name if name else ctx.parent.params.get("name") - name = ( - name - if name - else prompt_for_input("Node Name", required=True, quiet=ctx.obj.quiet) - ) - description = ( - description - if description - else prompt_for_input("Node Description", quiet=ctx.obj.quiet) - ) - if module_name or module_path: - from madsci.client.cli.module_cli import find_module - - module_path = find_module(module_name, module_path).path - else: - modules = search_for_file_pattern("*.module.yaml") - if modules: - module_path = prompt_from_list( - prompt="Module Definition Files", - options=modules, - default=modules[0], - required=True, - quiet=ctx.obj.quiet, - ) - try: - module_definition = NodeModuleDefinition.from_yaml(module_path) - commands = module_definition.commands - except Exception as e: - console.print(f"Error loading module definition file: {e}") - return - - 
node_definition = NodeDefinition( - node_name=name, - node_description=description, - module_definition=Path(module_path).absolute() if module_path else None, - commands=commands, - ) - console.print(node_definition) - - path = path if path else ctx.parent.params.get("path") - if not path: - if Path.cwd().name == "nodes": - default_path = Path.cwd() / f"{to_snake_case(name)}.node.yaml" - else: - default_path = Path.cwd() / "nodes" / f"{to_snake_case(name)}.node.yaml" - new_path = prompt_for_input( - "Path to save Node Definition file", - default=str(default_path), - quiet=ctx.obj.quiet, - ) - if new_path: - path = Path(new_path) - if not path.parent.exists(): - console.print(f"Creating directory: {path.parent}") - path.parent.mkdir(parents=True, exist_ok=True) - path = Path(path).absolute() - node_definition.module_definition = relative_path( - source=path.parent.absolute(), - target=Path(module_path).absolute(), - ) - node_definition.config = NodeModuleDefinition.from_yaml(module_path).config - save_model(path=path, model=node_definition, overwrite_check=not ctx.obj.quiet) - - # *Handle workcell integration - if not standalone and prompt_yes_no( - "Add node to a workcell?", - default=True, - quiet=ctx.obj.quiet, - ): - add_node_to_workcell(ctx, name, path, node_definition) - - -def add_node_to_workcell( - ctx: Context, - name: str, - path: PathLike, - node_definition: NodeDefinition, -) -> None: - """Adds a node definition to a workcell definition's 'nodes' section""" - workcell_files = search_for_file_pattern("*.workcell.yaml") - if workcell_files: - if ctx.obj.quiet: - # *In quiet mode, automatically add to first workcell - workcell_path = workcell_files[0] - else: - workcell_path = prompt_from_list( - prompt="Add node to workcell", - options=workcell_files, - default=workcell_files[0], - ) - - if workcell_path: - workcell_def = WorkcellDefinition.from_yaml(workcell_path) - # *Calculate relative path from workcell to node - workcell_dir = 
Path(workcell_path).parent - rel_path = str( - relative_path(target=path.absolute(), source=workcell_dir.absolute()), - ) - - # *Add node to workcell if not already present - workcell_def.nodes[node_definition.node_name] = rel_path - save_model(workcell_path, workcell_def, overwrite_check=False) - console.print( - f"Added node [bold]{name}[/] to workcell: [bold]{workcell_path}[/]", - ) - - -@node.command() -def list() -> None: - """List all nodes, including those in workcell files.""" - node_files = search_for_file_pattern("*.node.yaml") - workcell_files = search_for_file_pattern("*.workcell.yaml") - - nodes_found = False - - if node_files: - for node_file in sorted(set(node_files)): - node_definition = NodeDefinition.from_yaml(node_file) - console.print( - f"[bold]{node_definition.node_name}[/]: {node_definition.node_description} ({node_file})", - ) - nodes_found = True - - for workcell_file in workcell_files: - workcell_def = WorkcellDefinition.from_yaml(workcell_file) - for node in workcell_def.nodes: - if isinstance(node, NodeDefinition): - console.print( - f"[bold]{node.node_name}[/]: {node.node_description} (in {workcell_file})", - ) - nodes_found = True - - if not nodes_found: - console.print("No node definitions found") - - -@node.command() -@pass_node -def info(ctx: NodeContext) -> None: - """Get information about a node.""" - if ctx.node_def: - pprint(ctx.node_def) - else: - console.print( - "No node found. Specify node by name or path. 
If you don't have a node file, you can create one with 'madsci node create'.", - ) - - -@node.command() -@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.") -@pass_node -def delete(ctx: NodeContext, yes: bool) -> None: - """Delete a node.""" - if ctx.node_def and ctx.path: - console.print(f"Deleting node: {ctx.node_def.node_name} ({ctx.path})") - if yes or ctx.quiet or prompt_yes_no("Are you sure?"): - # First find all workcells that contain this node - workcell_files = search_for_file_pattern("*.workcell.yaml") - for workcell_file in workcell_files: - workcell_def = WorkcellDefinition.from_yaml(workcell_file) - if ctx.node_def.node_name in workcell_def.nodes and ( - yes - or ctx.quiet - or prompt_yes_no( - f"Remove from workcell [bold]{workcell_def.name}[/] ([italic]{workcell_file}[/])?", - default=True, - ) - ): - del workcell_def.nodes[ctx.node_def.node_name] - save_model(workcell_file, workcell_def, overwrite_check=False) - console.print(f"Removed from workcell: {workcell_file}") - - # Finally delete the node file - ctx.path.unlink() - console.print(f"Deleted {ctx.path}") - else: - console.print( - "No node found. Specify node by name or path. If you don't have a node file, you can create one with 'madsci node create'.", - ) - - -@node.command() -@pass_node -def validate(ctx: NodeContext) -> None: - """Validate a node definition file.""" - if ctx.node_def: - console.print(ctx.node_def) - else: - console.print( - "No node found. Specify node by name or path. If you don't have a node definition file, you can create one with 'madsci node create'.", - ) - - -@node.command() -@click.option("--command_name", "--name", "-n", type=str, required=False) -@click.option("--command", "-c", type=str, required=False) -@pass_node -def add_command(ctx: NodeContext, command_name: str, command: str) -> None: - """Add a command to a node definition.""" - if not ctx.node_def: - console.print( - "No node found. Specify node by name or path. 
If you don't have a node file, you can create one with 'madsci node create'.", - ) - return - - if not command_name: - command_name = prompt_for_input( - "Command Name", - required=True, - quiet=ctx.obj.quiet, - ) - if not command: - command = prompt_for_input("Command", required=True, quiet=ctx.obj.quiet) - - if command_name in ctx.node_def.commands: - console.print( - f"Command [bold]{command_name}[/] already exists in node definition: [bold]{ctx.node_def.node_name}[/] ({ctx.path})", - ) - if not prompt_yes_no( - "Do you want to overwrite it?", - default="no", - quiet=ctx.quiet, - ): - return - - ctx.node_def.commands[command_name] = command - save_model(ctx.path, ctx.node_def, overwrite_check=False) - console.print( - f"Added command [bold]{command_name}[/] to node: [bold]{ctx.node_def.node_name}[/]", - ) - - -@node.command() -@click.argument("command_name", type=str) -@pass_node -def run(ctx: NodeContext, command_name: str) -> None: - """Run a command in a node.""" - if not ctx.node_def: - console.print( - "No node found. Specify node by name or path. If you don't have a node file, you can create one with 'madsci node create'.", - ) - return - - if command_name in ctx.node_def.commands: - command = ctx.node_def.commands[command_name] - console.print( - f"Running command: [bold]{command_name}[/] ({command}) in node: [bold]{ctx.node_def.node_name}[/] ({ctx.path})", - ) - print(os.popen(command).read()) # noqa: S605 - else: - console.print( - f"Command [bold]{command_name}[/] not found in node definition: [bold]{ctx.node_def.node_name}[/] ({ctx.path})", - ) - - -@node.command() -@click.argument("command_name", type=str, required=False) -@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.") -@pass_node -def delete_command(ctx: NodeContext, command_name: str, yes: bool) -> None: - """Delete a command from a node definition.""" - if not ctx.node_def: - console.print( - "No node found. Specify node by name or path. 
If you don't have a node file, you can create one with 'madsci node create'.", - ) - return - - if not command_name: - if not ctx.node_def.commands: - console.print("No commands found in node definition.") - return - command_name = prompt_from_list( - "Select command to delete", - options=list(ctx.node_def.commands.keys()), - required=True, - quiet=ctx.obj.quiet, - ) - - if command_name in ctx.node_def.commands: - if ( - yes - or ctx.quiet - or prompt_yes_no( - f"Are you sure you want to delete command [bold]{command_name}[/]?", - default="no", - quiet=ctx.obj.quiet, - ) - ): - del ctx.node_def.commands[command_name] - save_model(ctx.path, ctx.node_def, overwrite_check=False) - console.print( - f"Deleted command [bold]{command_name}[/] from node: [bold]{ctx.node_def.node_name}[/]", - ) - else: - console.print( - f"Command [bold]{command_name}[/] not found in node definition: [bold]{ctx.node_def.node_name}[/] ({ctx.path})", - ) +"""Command Line Interface for managing MADSci Nodes.""" + +import os +from pathlib import Path +from typing import Optional + +import click +from click.core import Context +from rich.console import Console +from rich.pretty import pprint + +from madsci.common.types.module_types import NodeModuleDefinition +from madsci.common.types.node_types import NodeDefinition +from madsci.common.types.workcell_types import WorkcellDefinition +from madsci.common.utils import ( + PathLike, + prompt_for_input, + prompt_from_list, + prompt_yes_no, + relative_path, + save_model, + search_for_file_pattern, + to_snake_case, +) + +console = Console() + + +class NodeContext: + """Context object for node commands.""" + + def __init__(self) -> None: + """Initialize the context object.""" + self.node_def: Optional[NodeDefinition] = None + self.path: Optional[Path] = None + self.workcell_def: Optional[WorkcellDefinition] = None + self.quiet: bool = False + + +pass_node = click.make_pass_decorator(NodeContext) + + +def find_node(name: Optional[str], path: 
Optional[str]) -> NodeContext: + """Find a node by name or path, including within workcell files.""" + node_context = NodeContext() + + if path: + node_context.path = Path(path) + if node_context.path.exists(): + if path.endswith(".node.yaml"): + node_context.node_def = NodeDefinition.from_yaml(path) + elif path.endswith(".workcell.yaml"): + node_context.workcell_def = WorkcellDefinition.from_yaml(path) + node_context.node_def = find_node_in_workcell( + name, + node_context.workcell_def, + ) + return node_context + + node_files = search_for_file_pattern("*.node.yaml") + for node_file in node_files: + node_def = NodeDefinition.from_yaml(node_file) + if not name or node_def.node_name == name: + node_context.path = Path(node_file) + node_context.node_def = node_def + return node_context + + workcell_files = search_for_file_pattern("*.workcell.yaml") + for workcell_file in workcell_files: + workcell_def = WorkcellDefinition.from_yaml(workcell_file) + node_def = find_node_in_workcell(name, workcell_def) + if node_def: + node_context.path = Path(workcell_file) + node_context.workcell_def = workcell_def + node_context.node_def = node_def + return node_context + + return node_context + + +def find_node_in_workcell( + name: Optional[str], + workcell_def: WorkcellDefinition, +) -> Optional[NodeDefinition]: + """Find a node definition within a workcell.""" + for node in workcell_def.nodes: + if isinstance(node, NodeDefinition) and (not name or node.node_name == name): + return node + if isinstance(node, str) and node.endswith(".node.yaml"): + node_def = NodeDefinition.from_yaml(node) + if not name or node_def.node_name == name: + return node_def + return None + + +@click.group() +@click.option("--name", "-n", type=str, help="The name of the node to operate on.") +@click.option( + "--path", + "-p", + type=str, + help="The path to the node or workcell definition file.", +) +@click.pass_context +def node(ctx: Context, name: Optional[str], path: Optional[str]) -> None: + 
"""Manage nodes.""" + ctx.obj = find_node(name, path) + ctx.obj.quiet = ctx.parent.params.get("quiet") + + +@node.command() +@click.option("--name", "-n", type=str, help="The name of the node.", required=False) +@click.option( + "--path", + "-p", + type=str, + help="The path to save the node definition file.", +) +@click.option("--description", "-d", type=str, help="The description of the node.") +@click.option( + "--module_name", + "-m", + type=str, + help="The name of the module to use for the node.", +) +@click.option( + "--module_path", + "-m", + type=str, + help="Path to the module definition file to use for the node.", +) +@click.option( + "--standalone", + "-s", + is_flag=True, + help="Don't add node to any workcell.", +) +@click.pass_context +def create( # noqa: PLR0913 + ctx: Context, + name: Optional[str], + path: Optional[str], + description: Optional[str], + module_name: Optional[str], + module_path: Optional[str], + standalone: bool, +) -> None: + """Create a new node.""" + commands = {} + name = name if name else ctx.parent.params.get("name") + name = ( + name + if name + else prompt_for_input("Node Name", required=True, quiet=ctx.obj.quiet) + ) + description = ( + description + if description + else prompt_for_input("Node Description", quiet=ctx.obj.quiet) + ) + if module_name or module_path: + from madsci.client.cli.module_cli import find_module + + module_path = find_module(module_name, module_path).path + else: + modules = search_for_file_pattern("*.module.yaml") + if modules: + module_path = prompt_from_list( + prompt="Module Definition Files", + options=modules, + default=modules[0], + required=True, + quiet=ctx.obj.quiet, + ) + try: + module_definition = NodeModuleDefinition.from_yaml(module_path) + commands = module_definition.commands + except Exception as e: + console.print(f"Error loading module definition file: {e}") + return + + node_definition = NodeDefinition( + node_name=name, + node_description=description, + 
module_definition=Path(module_path).absolute() if module_path else None, + commands=commands, + ) + console.print(node_definition) + + path = path if path else ctx.parent.params.get("path") + if not path: + if Path.cwd().name == "nodes": + default_path = Path.cwd() / f"{to_snake_case(name)}.node.yaml" + else: + default_path = Path.cwd() / "nodes" / f"{to_snake_case(name)}.node.yaml" + new_path = prompt_for_input( + "Path to save Node Definition file", + default=str(default_path), + quiet=ctx.obj.quiet, + ) + if new_path: + path = Path(new_path) + if not path.parent.exists(): + console.print(f"Creating directory: {path.parent}") + path.parent.mkdir(parents=True, exist_ok=True) + path = Path(path).absolute() + node_definition.module_definition = relative_path( + source=path.parent.absolute(), + target=Path(module_path).absolute(), + ) + node_definition.config = NodeModuleDefinition.from_yaml(module_path).config + save_model(path=path, model=node_definition, overwrite_check=not ctx.obj.quiet) + + # *Handle workcell integration + if not standalone and prompt_yes_no( + "Add node to a workcell?", + default=True, + quiet=ctx.obj.quiet, + ): + add_node_to_workcell(ctx, name, path, node_definition) + + +def add_node_to_workcell( + ctx: Context, + name: str, + path: PathLike, + node_definition: NodeDefinition, +) -> None: + """Adds a node definition to a workcell definition's 'nodes' section""" + workcell_files = search_for_file_pattern("*.workcell.yaml") + if workcell_files: + if ctx.obj.quiet: + # *In quiet mode, automatically add to first workcell + workcell_path = workcell_files[0] + else: + workcell_path = prompt_from_list( + prompt="Add node to workcell", + options=workcell_files, + default=workcell_files[0], + ) + + if workcell_path: + workcell_def = WorkcellDefinition.from_yaml(workcell_path) + # *Calculate relative path from workcell to node + workcell_dir = Path(workcell_path).parent + rel_path = str( + relative_path(target=path.absolute(), 
source=workcell_dir.absolute()), + ) + + # *Add node to workcell if not already present + workcell_def.nodes[node_definition.node_name] = rel_path + save_model(workcell_path, workcell_def, overwrite_check=False) + console.print( + f"Added node [bold]{name}[/] to workcell: [bold]{workcell_path}[/]", + ) + + +@node.command() +def list() -> None: + """List all nodes, including those in workcell files.""" + node_files = search_for_file_pattern("*.node.yaml") + workcell_files = search_for_file_pattern("*.workcell.yaml") + + nodes_found = False + + if node_files: + for node_file in sorted(set(node_files)): + node_definition = NodeDefinition.from_yaml(node_file) + console.print( + f"[bold]{node_definition.node_name}[/]: {node_definition.node_description} ({node_file})", + ) + nodes_found = True + + for workcell_file in workcell_files: + workcell_def = WorkcellDefinition.from_yaml(workcell_file) + for node in workcell_def.nodes: + if isinstance(node, NodeDefinition): + console.print( + f"[bold]{node.node_name}[/]: {node.node_description} (in {workcell_file})", + ) + nodes_found = True + + if not nodes_found: + console.print("No node definitions found") + + +@node.command() +@pass_node +def info(ctx: NodeContext) -> None: + """Get information about a node.""" + if ctx.node_def: + pprint(ctx.node_def) + else: + console.print( + "No node found. Specify node by name or path. 
If you don't have a node file, you can create one with 'madsci node create'.", + ) + + +@node.command() +@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.") +@pass_node +def delete(ctx: NodeContext, yes: bool) -> None: + """Delete a node.""" + if ctx.node_def and ctx.path: + console.print(f"Deleting node: {ctx.node_def.node_name} ({ctx.path})") + if yes or ctx.quiet or prompt_yes_no("Are you sure?"): + # First find all workcells that contain this node + workcell_files = search_for_file_pattern("*.workcell.yaml") + for workcell_file in workcell_files: + workcell_def = WorkcellDefinition.from_yaml(workcell_file) + if ctx.node_def.node_name in workcell_def.nodes and ( + yes + or ctx.quiet + or prompt_yes_no( + f"Remove from workcell [bold]{workcell_def.name}[/] ([italic]{workcell_file}[/])?", + default=True, + ) + ): + del workcell_def.nodes[ctx.node_def.node_name] + save_model(workcell_file, workcell_def, overwrite_check=False) + console.print(f"Removed from workcell: {workcell_file}") + + # Finally delete the node file + ctx.path.unlink() + console.print(f"Deleted {ctx.path}") + else: + console.print( + "No node found. Specify node by name or path. If you don't have a node file, you can create one with 'madsci node create'.", + ) + + +@node.command() +@pass_node +def validate(ctx: NodeContext) -> None: + """Validate a node definition file.""" + if ctx.node_def: + console.print(ctx.node_def) + else: + console.print( + "No node found. Specify node by name or path. If you don't have a node definition file, you can create one with 'madsci node create'.", + ) + + +@node.command() +@click.option("--command_name", "--name", "-n", type=str, required=False) +@click.option("--command", "-c", type=str, required=False) +@pass_node +def add_command(ctx: NodeContext, command_name: str, command: str) -> None: + """Add a command to a node definition.""" + if not ctx.node_def: + console.print( + "No node found. Specify node by name or path. 
If you don't have a node file, you can create one with 'madsci node create'.", + ) + return + + if not command_name: + command_name = prompt_for_input( + "Command Name", + required=True, + quiet=ctx.obj.quiet, + ) + if not command: + command = prompt_for_input("Command", required=True, quiet=ctx.obj.quiet) + + if command_name in ctx.node_def.commands: + console.print( + f"Command [bold]{command_name}[/] already exists in node definition: [bold]{ctx.node_def.node_name}[/] ({ctx.path})", + ) + if not prompt_yes_no( + "Do you want to overwrite it?", + default="no", + quiet=ctx.quiet, + ): + return + + ctx.node_def.commands[command_name] = command + save_model(ctx.path, ctx.node_def, overwrite_check=False) + console.print( + f"Added command [bold]{command_name}[/] to node: [bold]{ctx.node_def.node_name}[/]", + ) + + +@node.command() +@click.argument("command_name", type=str) +@pass_node +def run(ctx: NodeContext, command_name: str) -> None: + """Run a command in a node.""" + if not ctx.node_def: + console.print( + "No node found. Specify node by name or path. If you don't have a node file, you can create one with 'madsci node create'.", + ) + return + + if command_name in ctx.node_def.commands: + command = ctx.node_def.commands[command_name] + console.print( + f"Running command: [bold]{command_name}[/] ({command}) in node: [bold]{ctx.node_def.node_name}[/] ({ctx.path})", + ) + print(os.popen(command).read()) # noqa: S605 + else: + console.print( + f"Command [bold]{command_name}[/] not found in node definition: [bold]{ctx.node_def.node_name}[/] ({ctx.path})", + ) + + +@node.command() +@click.argument("command_name", type=str, required=False) +@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.") +@pass_node +def delete_command(ctx: NodeContext, command_name: str, yes: bool) -> None: + """Delete a command from a node definition.""" + if not ctx.node_def: + console.print( + "No node found. Specify node by name or path. 
If you don't have a node file, you can create one with 'madsci node create'.", + ) + return + + if not command_name: + if not ctx.node_def.commands: + console.print("No commands found in node definition.") + return + command_name = prompt_from_list( + "Select command to delete", + options=list(ctx.node_def.commands.keys()), + required=True, + quiet=ctx.obj.quiet, + ) + + if command_name in ctx.node_def.commands: + if ( + yes + or ctx.quiet + or prompt_yes_no( + f"Are you sure you want to delete command [bold]{command_name}[/]?", + default="no", + quiet=ctx.obj.quiet, + ) + ): + del ctx.node_def.commands[command_name] + save_model(ctx.path, ctx.node_def, overwrite_check=False) + console.print( + f"Deleted command [bold]{command_name}[/] from node: [bold]{ctx.node_def.node_name}[/]", + ) + else: + console.print( + f"Command [bold]{command_name}[/] not found in node definition: [bold]{ctx.node_def.node_name}[/] ({ctx.path})", + ) diff --git a/madsci/madsci_client/madsci/client/cli/resources_cli.py b/madsci/madsci_client/madsci/client/cli/resources_cli.py index 24a1492..7a77096 100644 --- a/madsci/madsci_client/madsci/client/cli/resources_cli.py +++ b/madsci/madsci_client/madsci/client/cli/resources_cli.py @@ -1,364 +1,364 @@ -"""CLI for interacting with resources.""" - -from pathlib import Path -from typing import Optional - -import click -from click.core import Context -from rich.console import Console -from rich.pretty import pprint - -from madsci.common.types.resource_types import ( - RESOURCE_BASE_TYPES, - RESOURCE_DEFINITION_MAP, - RESOURCE_TYPE_DEFINITION_MAP, - ResourceDefinition, - ResourceFile, - ResourceType, -) -from madsci.common.utils import ( - prompt_for_input, - prompt_from_list, - prompt_from_pydantic_model, - prompt_yes_no, - save_model, - search_for_file_pattern, -) - -console = Console() - - -class ResourceContext: - """Context object for resource commands.""" - - def __init__(self) -> None: - """Initialize the context object.""" - 
self.resource_file: Optional[ResourceFile] = None - self.path: Optional[Path] = None - - -pass_resource = click.make_pass_decorator(ResourceContext) - - -def find_resource_file(path: Optional[str]) -> ResourceContext: - """Find a resource file by path.""" - resource_context = ResourceContext() - - if path: - resource_context.path = Path(path) - if resource_context.path.exists(): - resource_context.resource_file = ResourceFile.from_yaml(path) - return resource_context - - # Search for any resource file - resource_files = search_for_file_pattern("*.resources.yaml") - if resource_files: - resource_context.path = Path(resource_files[0]) - resource_context.resource_file = ResourceFile.from_yaml(resource_files[0]) - - return resource_context - - -@click.group() -@click.option("--path", "-p", type=str, help="Path to the resource definition file.") -@click.pass_context -def resource(ctx: Context, path: Optional[str]) -> None: - """Manage resources.""" - ctx.obj = find_resource_file(path) - - -@resource.command() -@click.pass_context -def create(ctx: Context) -> None: - """Create a new resource file.""" - path = ctx.parent.params.get("path") - if not path: - default_path = Path.cwd() / "default.resources.yaml" - new_path = prompt_for_input( - "Path to save Resource Definition file", - default=str(default_path), - ) - if new_path: - path = Path(new_path) - - resource_file = ResourceFile() - save_model(path=path, model=resource_file) - console.print(f"Created resource file: {path}") - - -@resource.command() -def list() -> None: - """List all resource files and their contents.""" - resource_files = search_for_file_pattern("*.resources.yaml") - - if resource_files: - for resource_file in sorted(set(resource_files)): - resource_def = ResourceFile.from_yaml(resource_file) - console.print(f"\n[bold]Resource File[/]: {resource_file}") - - if resource_def.resource_types: - console.print("\n[bold]Resource Types:[/]") - for resource_type in resource_def.resource_types: - 
console.print( - f" [bold]{resource_type.type_name}[/]: {resource_type.type_description}", - ) - - if resource_def.default_resources: - console.print("\n[bold]Default Resources:[/]") - for resource in resource_def.default_resources: - console.print( - f" [bold]{resource.resource_name}[/]: {resource.resource_description or 'No description'}", - ) - else: - console.print("No resource files found") - - -@resource.group(name="type") -def resource_type() -> None: - """Manage resource types within a resource file.""" - - -@resource_type.command() -@click.option("--name", "-n", type=str, help="Name of the resource type.") -@click.option("--description", "-d", type=str, help="Description of the resource type.") -@click.option("--base-type", "-b", type=str, help="Base type of the resource.") -@pass_resource -def add( - ctx: ResourceContext, - name: Optional[str], - description: Optional[str], - base_type: Optional[str], -) -> None: - """Add a new resource type to the resource file.""" - if not ctx.resource_file or not ctx.path: - console.print( - "No resource file found. 
Create one with 'madsci resource create' first.", - ) - return - - if not name: - name = prompt_for_input("Resource Type Name", required=True) - if not description: - description = prompt_for_input("Resource Type Description") - if not base_type or base_type not in [t.value for t in RESOURCE_BASE_TYPES]: - base_type = prompt_from_list( - "Base Type", - [t.value for t in RESOURCE_BASE_TYPES], - default=ResourceType.resource.value, - ) - # *Get the appropriate type definition class - type_def_class = RESOURCE_TYPE_DEFINITION_MAP[base_type] - - # *Create the type definition with the fields we've collected - type_def = type_def_class( - **prompt_from_pydantic_model( - type_def_class, - "Resource Type Definition", - type_name=name, - type_description=description, - base_type=base_type, - ), - ) - - # *Check if type already exists - if any(rt.type_name == name for rt in ctx.resource_file.resource_types): - if not prompt_yes_no( - f"Resource type '{name}' already exists. Overwrite?", - default=False, - ): - return - # *Remove existing type - ctx.resource_file.resource_types = [ - rt for rt in ctx.resource_file.resource_types if rt.type_name != name - ] - - # *Add the type definition to the resource file - ctx.resource_file.resource_types.append(type_def) - save_model(ctx.path, ctx.resource_file, overwrite_check=False) - console.print(f"Added resource type: [bold]{name}[/]") - - -@resource_type.command() -@click.argument("name", required=False) -@pass_resource -def delete(ctx: ResourceContext, name: Optional[str]) -> None: - """Delete a resource type from the resource file.""" - if not ctx.resource_file or not ctx.path: - console.print( - "No resource file found. 
Create one with 'madsci resource create' first.", - ) - return - - if not ctx.resource_file.resource_types: - console.print("No resource types defined in this file.") - return - - if not name: - name = prompt_from_list( - "Resource Type to Delete", - [rt.type_name for rt in ctx.resource_file.resource_types], - required=True, - ) - - # Find the resource type - resource_type = next( - (rt for rt in ctx.resource_file.resource_types if rt.type_name == name), - None, - ) - if not resource_type: - console.print(f"Resource type [bold]{name}[/] not found.") - return - - # Check if type is used by any default resources - used_by_resources = [ - r.resource_name - for r in ctx.resource_file.default_resources - if r.resource_type == name - ] - if used_by_resources: - console.print( - f"Cannot delete resource type [bold]{name}[/] as it is used by these resources:", - ) - for resource_name in used_by_resources: - console.print(f" - {resource_name}") - return - - if prompt_yes_no(f"Delete resource type [bold]{name}[/]?", default=False): - ctx.resource_file.resource_types = [ - rt for rt in ctx.resource_file.resource_types if rt.type_name != name - ] - save_model(ctx.path, ctx.resource_file, overwrite_check=False) - console.print(f"Deleted resource type: [bold]{name}[/]") - - -@resource_type.command() -@click.argument("name", required=False) -@pass_resource -def info(ctx: ResourceContext, name: Optional[str]) -> None: - """Show information about a resource type.""" - if not ctx.resource_file: - console.print( - "No resource file found. 
Create one with 'madsci resource create' first.", - ) - return - - if not ctx.resource_file.resource_types: - console.print("No resource types defined in this file.") - return - - if not name: - name = prompt_from_list( - "Resource Type", - [rt.type_name for rt in ctx.resource_file.resource_types], - required=True, - ) - - resource_type = next( - (rt for rt in ctx.resource_file.resource_types if rt.type_name == name), - None, - ) - if resource_type: - pprint(resource_type) - else: - console.print(f"Resource type [bold]{name}[/] not found.") - - -@resource_type.command(name="list") -@pass_resource -def list_types(ctx: ResourceContext) -> None: - """List all resource types in the file.""" - if not ctx.resource_file: - console.print( - "No resource file found. Create one with 'madsci resource create' first.", - ) - return - - if not ctx.resource_file.resource_types: - console.print("No resource types defined in this file.") - return - - console.print("\n[bold]Resource Types:[/]") - for resource_type in ctx.resource_file.resource_types: - console.print( - f" [bold]{resource_type.type_name}[/] ({resource_type.base_type})", - ) - if resource_type.type_description: - console.print(f" Description: {resource_type.type_description}") - console.print(f" Parent Types: {', '.join(resource_type.parent_types)}") - - -@resource.command() -@pass_resource -def add_resource(ctx: ResourceContext) -> None: - """Add a new default resource to the resource file.""" - if not ctx.resource_file or not ctx.path: - console.print( - "No resource file found. 
Create one with 'madsci resource create' first.", - ) - return - - name = prompt_for_input("Resource Name", required=True) - description = prompt_for_input("Resource Description") - - # * Combine built-in types and custom types for selection - available_types = list(RESOURCE_DEFINITION_MAP.keys()) - custom_types = [rt.type_name for rt in ctx.resource_file.resource_types] - all_types = available_types + custom_types - - resource_type = prompt_from_list( - "Resource Type", - all_types, - default=ResourceType.resource.value, - ) - - # * Create the resource definition - resource_def_class = RESOURCE_DEFINITION_MAP.get(resource_type, ResourceDefinition) - - resource_def = resource_def_class( - resource_name=name, - resource_description=description, - resource_type=resource_type, - ) - - ctx.resource_file.default_resources.append(resource_def) - save_model(ctx.path, ctx.resource_file, overwrite_check=False) - console.print(f"Added default resource: [bold]{name}[/]") - - -@resource.command(name="info") -@pass_resource -def file_info(ctx: ResourceContext) -> None: - """Get information about a resource file.""" - if ctx.resource_file: - pprint(ctx.resource_file) - else: - console.print( - "No resource file found. Create one with 'madsci resource create'.", - ) - - -@resource.command() -@pass_resource -def validate(ctx: ResourceContext) -> None: - """Validate a resource file.""" - if ctx.resource_file: - console.print(ctx.resource_file) - else: - console.print( - "No resource file found. Create one with 'madsci resource create'.", - ) - - -@resource.command(name="delete") -@pass_resource -def delete_file(ctx: ResourceContext) -> None: - """Delete a resource file.""" - if ctx.resource_file and ctx.path: - console.print(f"Deleting resource file: {ctx.path}") - if prompt_yes_no("Are you sure?"): - ctx.path.unlink() - console.print(f"Deleted {ctx.path}") - else: - console.print( - "No resource file found. 
Create one with 'madsci resource create'.", - ) +"""CLI for interacting with resources.""" + +from pathlib import Path +from typing import Optional + +import click +from click.core import Context +from rich.console import Console +from rich.pretty import pprint + +from madsci.common.types.resource_types import ( + RESOURCE_BASE_TYPES, + RESOURCE_DEFINITION_MAP, + RESOURCE_TYPE_DEFINITION_MAP, + ResourceDefinition, + ResourceFile, + ResourceType, +) +from madsci.common.utils import ( + prompt_for_input, + prompt_from_list, + prompt_from_pydantic_model, + prompt_yes_no, + save_model, + search_for_file_pattern, +) + +console = Console() + + +class ResourceContext: + """Context object for resource commands.""" + + def __init__(self) -> None: + """Initialize the context object.""" + self.resource_file: Optional[ResourceFile] = None + self.path: Optional[Path] = None + + +pass_resource = click.make_pass_decorator(ResourceContext) + + +def find_resource_file(path: Optional[str]) -> ResourceContext: + """Find a resource file by path.""" + resource_context = ResourceContext() + + if path: + resource_context.path = Path(path) + if resource_context.path.exists(): + resource_context.resource_file = ResourceFile.from_yaml(path) + return resource_context + + # Search for any resource file + resource_files = search_for_file_pattern("*.resources.yaml") + if resource_files: + resource_context.path = Path(resource_files[0]) + resource_context.resource_file = ResourceFile.from_yaml(resource_files[0]) + + return resource_context + + +@click.group() +@click.option("--path", "-p", type=str, help="Path to the resource definition file.") +@click.pass_context +def resource(ctx: Context, path: Optional[str]) -> None: + """Manage resources.""" + ctx.obj = find_resource_file(path) + + +@resource.command() +@click.pass_context +def create(ctx: Context) -> None: + """Create a new resource file.""" + path = ctx.parent.params.get("path") + if not path: + default_path = Path.cwd() / 
"default.resources.yaml" + new_path = prompt_for_input( + "Path to save Resource Definition file", + default=str(default_path), + ) + if new_path: + path = Path(new_path) + + resource_file = ResourceFile() + save_model(path=path, model=resource_file) + console.print(f"Created resource file: {path}") + + +@resource.command() +def list() -> None: + """List all resource files and their contents.""" + resource_files = search_for_file_pattern("*.resources.yaml") + + if resource_files: + for resource_file in sorted(set(resource_files)): + resource_def = ResourceFile.from_yaml(resource_file) + console.print(f"\n[bold]Resource File[/]: {resource_file}") + + if resource_def.resource_types: + console.print("\n[bold]Resource Types:[/]") + for resource_type in resource_def.resource_types: + console.print( + f" [bold]{resource_type.type_name}[/]: {resource_type.type_description}", + ) + + if resource_def.default_resources: + console.print("\n[bold]Default Resources:[/]") + for resource in resource_def.default_resources: + console.print( + f" [bold]{resource.resource_name}[/]: {resource.resource_description or 'No description'}", + ) + else: + console.print("No resource files found") + + +@resource.group(name="type") +def resource_type() -> None: + """Manage resource types within a resource file.""" + + +@resource_type.command() +@click.option("--name", "-n", type=str, help="Name of the resource type.") +@click.option("--description", "-d", type=str, help="Description of the resource type.") +@click.option("--base-type", "-b", type=str, help="Base type of the resource.") +@pass_resource +def add( + ctx: ResourceContext, + name: Optional[str], + description: Optional[str], + base_type: Optional[str], +) -> None: + """Add a new resource type to the resource file.""" + if not ctx.resource_file or not ctx.path: + console.print( + "No resource file found. 
Create one with 'madsci resource create' first.", + ) + return + + if not name: + name = prompt_for_input("Resource Type Name", required=True) + if not description: + description = prompt_for_input("Resource Type Description") + if not base_type or base_type not in [t.value for t in RESOURCE_BASE_TYPES]: + base_type = prompt_from_list( + "Base Type", + [t.value for t in RESOURCE_BASE_TYPES], + default=ResourceType.resource.value, + ) + # *Get the appropriate type definition class + type_def_class = RESOURCE_TYPE_DEFINITION_MAP[base_type] + + # *Create the type definition with the fields we've collected + type_def = type_def_class( + **prompt_from_pydantic_model( + type_def_class, + "Resource Type Definition", + type_name=name, + type_description=description, + base_type=base_type, + ), + ) + + # *Check if type already exists + if any(rt.type_name == name for rt in ctx.resource_file.resource_types): + if not prompt_yes_no( + f"Resource type '{name}' already exists. Overwrite?", + default=False, + ): + return + # *Remove existing type + ctx.resource_file.resource_types = [ + rt for rt in ctx.resource_file.resource_types if rt.type_name != name + ] + + # *Add the type definition to the resource file + ctx.resource_file.resource_types.append(type_def) + save_model(ctx.path, ctx.resource_file, overwrite_check=False) + console.print(f"Added resource type: [bold]{name}[/]") + + +@resource_type.command() +@click.argument("name", required=False) +@pass_resource +def delete(ctx: ResourceContext, name: Optional[str]) -> None: + """Delete a resource type from the resource file.""" + if not ctx.resource_file or not ctx.path: + console.print( + "No resource file found. 
Create one with 'madsci resource create' first.", + ) + return + + if not ctx.resource_file.resource_types: + console.print("No resource types defined in this file.") + return + + if not name: + name = prompt_from_list( + "Resource Type to Delete", + [rt.type_name for rt in ctx.resource_file.resource_types], + required=True, + ) + + # Find the resource type + resource_type = next( + (rt for rt in ctx.resource_file.resource_types if rt.type_name == name), + None, + ) + if not resource_type: + console.print(f"Resource type [bold]{name}[/] not found.") + return + + # Check if type is used by any default resources + used_by_resources = [ + r.resource_name + for r in ctx.resource_file.default_resources + if r.resource_type == name + ] + if used_by_resources: + console.print( + f"Cannot delete resource type [bold]{name}[/] as it is used by these resources:", + ) + for resource_name in used_by_resources: + console.print(f" - {resource_name}") + return + + if prompt_yes_no(f"Delete resource type [bold]{name}[/]?", default=False): + ctx.resource_file.resource_types = [ + rt for rt in ctx.resource_file.resource_types if rt.type_name != name + ] + save_model(ctx.path, ctx.resource_file, overwrite_check=False) + console.print(f"Deleted resource type: [bold]{name}[/]") + + +@resource_type.command() +@click.argument("name", required=False) +@pass_resource +def info(ctx: ResourceContext, name: Optional[str]) -> None: + """Show information about a resource type.""" + if not ctx.resource_file: + console.print( + "No resource file found. 
Create one with 'madsci resource create' first.", + ) + return + + if not ctx.resource_file.resource_types: + console.print("No resource types defined in this file.") + return + + if not name: + name = prompt_from_list( + "Resource Type", + [rt.type_name for rt in ctx.resource_file.resource_types], + required=True, + ) + + resource_type = next( + (rt for rt in ctx.resource_file.resource_types if rt.type_name == name), + None, + ) + if resource_type: + pprint(resource_type) + else: + console.print(f"Resource type [bold]{name}[/] not found.") + + +@resource_type.command(name="list") +@pass_resource +def list_types(ctx: ResourceContext) -> None: + """List all resource types in the file.""" + if not ctx.resource_file: + console.print( + "No resource file found. Create one with 'madsci resource create' first.", + ) + return + + if not ctx.resource_file.resource_types: + console.print("No resource types defined in this file.") + return + + console.print("\n[bold]Resource Types:[/]") + for resource_type in ctx.resource_file.resource_types: + console.print( + f" [bold]{resource_type.type_name}[/] ({resource_type.base_type})", + ) + if resource_type.type_description: + console.print(f" Description: {resource_type.type_description}") + console.print(f" Parent Types: {', '.join(resource_type.parent_types)}") + + +@resource.command() +@pass_resource +def add_resource(ctx: ResourceContext) -> None: + """Add a new default resource to the resource file.""" + if not ctx.resource_file or not ctx.path: + console.print( + "No resource file found. 
Create one with 'madsci resource create' first.", + ) + return + + name = prompt_for_input("Resource Name", required=True) + description = prompt_for_input("Resource Description") + + # * Combine built-in types and custom types for selection + available_types = list(RESOURCE_DEFINITION_MAP.keys()) + custom_types = [rt.type_name for rt in ctx.resource_file.resource_types] + all_types = available_types + custom_types + + resource_type = prompt_from_list( + "Resource Type", + all_types, + default=ResourceType.resource.value, + ) + + # * Create the resource definition + resource_def_class = RESOURCE_DEFINITION_MAP.get(resource_type, ResourceDefinition) + + resource_def = resource_def_class( + resource_name=name, + resource_description=description, + resource_type=resource_type, + ) + + ctx.resource_file.default_resources.append(resource_def) + save_model(ctx.path, ctx.resource_file, overwrite_check=False) + console.print(f"Added default resource: [bold]{name}[/]") + + +@resource.command(name="info") +@pass_resource +def file_info(ctx: ResourceContext) -> None: + """Get information about a resource file.""" + if ctx.resource_file: + pprint(ctx.resource_file) + else: + console.print( + "No resource file found. Create one with 'madsci resource create'.", + ) + + +@resource.command() +@pass_resource +def validate(ctx: ResourceContext) -> None: + """Validate a resource file.""" + if ctx.resource_file: + console.print(ctx.resource_file) + else: + console.print( + "No resource file found. Create one with 'madsci resource create'.", + ) + + +@resource.command(name="delete") +@pass_resource +def delete_file(ctx: ResourceContext) -> None: + """Delete a resource file.""" + if ctx.resource_file and ctx.path: + console.print(f"Deleting resource file: {ctx.path}") + if prompt_yes_no("Are you sure?"): + ctx.path.unlink() + console.print(f"Deleted {ctx.path}") + else: + console.print( + "No resource file found. 
Create one with 'madsci resource create'.", + ) diff --git a/madsci/madsci_client/madsci/client/cli/workcell_cli.py b/madsci/madsci_client/madsci/client/cli/workcell_cli.py index fd46b6c..b33f756 100644 --- a/madsci/madsci_client/madsci/client/cli/workcell_cli.py +++ b/madsci/madsci_client/madsci/client/cli/workcell_cli.py @@ -1,261 +1,261 @@ -"""Command Line Interface for managing MADSci Squid workcells.""" - -from pathlib import Path -from typing import Optional - -import click -from click import Context -from rich.console import Console -from rich.pretty import pprint - -from madsci.client.cli.lab_cli import LabContext, find_lab -from madsci.common.types.workcell_types import WorkcellDefinition -from madsci.common.utils import ( - prompt_for_input, - prompt_yes_no, - save_model, - search_for_file_pattern, - to_snake_case, -) - -console = Console() - - -class WorkcellContext: - """Context object for workcell commands.""" - - def __init__(self) -> None: - """Initialize the context object.""" - self.workcell: Optional[WorkcellDefinition] = None - self.path: Optional[Path] = None - self.lab: Optional[LabContext] = None - self.quiet: bool = False - - -pass_workcell = click.make_pass_decorator(WorkcellContext) - - -def find_workcell( - name: Optional[str], - path: Optional[str], - lab_context: Optional[LabContext] = None, -) -> WorkcellContext: - """Find a workcell by name or path.""" - workcell_context = WorkcellContext() - workcell_context.lab = lab_context - - if path: - workcell_context.path = Path(path) - if workcell_context.path.exists(): - workcell_context.workcell = WorkcellDefinition.from_yaml(path) - return workcell_context - - # If we have a lab context, search in the lab directory first - if lab_context and lab_context.path: - workcell_files = search_for_file_pattern( - "*.workcell.yaml", - start_dir=lab_context.path.parent, - ) - for workcell_file in workcell_files: - workcell = WorkcellDefinition.from_yaml(workcell_file) - if not name or workcell.name == 
name: - workcell_context.path = Path(workcell_file) - workcell_context.workcell = workcell - return workcell_context - - # If not found in lab directory or no lab context, search everywhere - workcell_files = search_for_file_pattern("*.workcell.yaml") - for workcell_file in workcell_files: - workcell = WorkcellDefinition.from_yaml(workcell_file) - if not name or workcell.name == name: - workcell_context.path = Path(workcell_file) - workcell_context.workcell = workcell - return workcell_context - - return workcell_context - - -@click.group() -@click.option("--name", "-n", type=str, help="Name of the workcell.") -@click.option("--path", "-p", type=str, help="Path to the workcell definition file.") -@click.option("--lab", "-l", type=str, help="Name or path of the lab to operate in.") -@click.pass_context -def workcell( - ctx: Context, - name: Optional[str], - path: Optional[str], - lab: Optional[str], -) -> None: - """Manage workcells. Specify workcell by name or path.""" - lab_context = find_lab(name=lab, path=lab) - ctx.obj = find_workcell(name=name, path=path, lab_context=lab_context) - ctx.obj.quiet = ctx.parent.params.get("quiet") - - -@workcell.command() -@click.option("--name", "-n", type=str, help="The name of the workcell.") -@click.option( - "--path", - "-p", - type=str, - help="The path to the workcell definition file.", -) -@click.option("--description", "-d", type=str, help="The description of the workcell.") -@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.") -@click.pass_context -def create( - ctx: Context, - name: Optional[str], - path: Optional[str], - description: Optional[str], - yes: bool, -) -> None: - """Create a new workcell.""" - name = name if name else ctx.parent.params.get("name") - name = ( - name - if name - else prompt_for_input("Workcell Name", required=True, quiet=ctx.obj.quiet) - ) - description = ( - description - if description - else prompt_for_input("Workcell Description", quiet=ctx.obj.quiet) - ) - - 
workcell = WorkcellDefinition(name=name, description=description) - console.print(workcell) - - path = path if path else ctx.parent.params.get("path") - if not path: - if ctx.obj.lab and ctx.obj.lab.path: - # If we have a lab context, create in the lab directory - path = ( - ctx.obj.lab.path.parent - / "workcells" - / f"{to_snake_case(name)}.workcell.yaml" - ) - else: - current_path = Path.cwd() - if current_path.name == "workcells": - path = current_path / f"{to_snake_case(name)}.workcell.yaml" - else: - path = ( - current_path / "workcells" / f"{to_snake_case(name)}.workcell.yaml" - ) - - new_path = prompt_for_input( - "Path to save Workcell Definition file", - default=path, - quiet=ctx.obj.quiet, - ) - if new_path: - path = Path(new_path) - else: - path = Path(path) - - if not path.exists(): - path.parent.mkdir(parents=True, exist_ok=True) - save_model(path, workcell, overwrite_check=not ctx.obj.quiet and not yes) - - if ( - ctx.obj.lab - and ctx.obj.lab.lab_def - and ( - yes - or ctx.obj.quiet - or prompt_yes_no( - f"Add workcell to lab [bold]{ctx.obj.lab.lab_def.name}[/] ([italic]{ctx.obj.lab.path}[/])?", - default="yes", - ) - ) - ): - relative_path = path.relative_to(ctx.obj.lab.path.parent) - if name not in ctx.obj.lab.lab_def.workcells: - ctx.obj.lab.lab_def.workcells[name] = relative_path - save_model(ctx.obj.lab.path, ctx.obj.lab.lab_def, overwrite_check=False) - - -@workcell.command() -@pass_workcell -def list(ctx: WorkcellContext) -> None: - """List all workcells.""" - search_dir = ctx.lab.path.parent if ctx.lab and ctx.lab.path else None - workcell_files = search_for_file_pattern("*.workcell.yaml", start_dir=search_dir) - - if workcell_files: - for workcell_file in sorted(set(workcell_files)): - workcell = WorkcellDefinition.from_yaml(workcell_file) - console.print( - f"[bold]{workcell.name}[/]: {workcell.description} ({workcell_file})", - ) - else: - lab_context = " in lab directory" if ctx.lab and ctx.lab.path else "" - print( - f"No workcell 
definitions found{lab_context}, you can create one with 'madsci workcell create'", - ) - - -@workcell.command() -@pass_workcell -def info(ctx: WorkcellContext) -> None: - """Get information about a workcell.""" - if ctx.workcell: - pprint(ctx.workcell) - else: - print( - "No workcell specified/found, please specify a workcell with --name or --path, or create a new workcell with 'madsci workcell create'", - ) - - -@workcell.command() -@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.") -@click.option("--name", "-n", type=str, help="The name of the workcell.") -@click.option( - "--path", - "-p", - type=str, - help="The path to the workcell definition file.", -) -@pass_workcell -def delete( - ctx: WorkcellContext, - yes: bool, - name: Optional[str], - path: Optional[str], -) -> None: - """Delete a workcell.""" - if name or path: - ctx.workcell = find_workcell(name=name, path=None, lab_context=ctx.lab).workcell - if ctx.workcell and ctx.path: - console.print(f"Deleting workcell: {ctx.workcell.name} ({ctx.path})") - if yes or ctx.quiet or prompt_yes_no("Are you sure?", default="no"): - ctx.path.unlink() - console.print(f"Deleted {ctx.path}") - if ( - (ctx.lab and ctx.lab.lab_def and yes) - or ctx.quiet - or prompt_yes_no( - f"Remove from lab [bold]{ctx.lab.lab_def.name}[/] ([italic]{ctx.lab.path}[/])?", - default="yes", - ) - ) and ctx.workcell.name in ctx.lab.lab_def.workcells: - del ctx.lab.lab_def.workcells[ctx.workcell.name] - save_model(ctx.lab.path, ctx.lab.lab_def, overwrite_check=False) - else: - print( - "No workcell specified/found, please specify a workcell with --name or --path, or create a new workcell with 'madsci workcell create'", - ) - - -@workcell.command() -@pass_workcell -def validate(ctx: WorkcellContext) -> None: - """Validate a workcell definition file.""" - if ctx.workcell: - console.print(ctx.workcell) - return - console.print( - "No workcell specified, please specify a workcell with --name or --path, or create a new 
workcell with 'madsci workcell create'", - ) - return +"""Command Line Interface for managing MADSci Squid workcells.""" + +from pathlib import Path +from typing import Optional + +import click +from click import Context +from rich.console import Console +from rich.pretty import pprint + +from madsci.client.cli.lab_cli import LabContext, find_lab +from madsci.common.types.workcell_types import WorkcellDefinition +from madsci.common.utils import ( + prompt_for_input, + prompt_yes_no, + save_model, + search_for_file_pattern, + to_snake_case, +) + +console = Console() + + +class WorkcellContext: + """Context object for workcell commands.""" + + def __init__(self) -> None: + """Initialize the context object.""" + self.workcell: Optional[WorkcellDefinition] = None + self.path: Optional[Path] = None + self.lab: Optional[LabContext] = None + self.quiet: bool = False + + +pass_workcell = click.make_pass_decorator(WorkcellContext) + + +def find_workcell( + name: Optional[str], + path: Optional[str], + lab_context: Optional[LabContext] = None, +) -> WorkcellContext: + """Find a workcell by name or path.""" + workcell_context = WorkcellContext() + workcell_context.lab = lab_context + + if path: + workcell_context.path = Path(path) + if workcell_context.path.exists(): + workcell_context.workcell = WorkcellDefinition.from_yaml(path) + return workcell_context + + # If we have a lab context, search in the lab directory first + if lab_context and lab_context.path: + workcell_files = search_for_file_pattern( + "*.workcell.yaml", + start_dir=lab_context.path.parent, + ) + for workcell_file in workcell_files: + workcell = WorkcellDefinition.from_yaml(workcell_file) + if not name or workcell.name == name: + workcell_context.path = Path(workcell_file) + workcell_context.workcell = workcell + return workcell_context + + # If not found in lab directory or no lab context, search everywhere + workcell_files = search_for_file_pattern("*.workcell.yaml") + for workcell_file in workcell_files: 
+ workcell = WorkcellDefinition.from_yaml(workcell_file) + if not name or workcell.name == name: + workcell_context.path = Path(workcell_file) + workcell_context.workcell = workcell + return workcell_context + + return workcell_context + + +@click.group() +@click.option("--name", "-n", type=str, help="Name of the workcell.") +@click.option("--path", "-p", type=str, help="Path to the workcell definition file.") +@click.option("--lab", "-l", type=str, help="Name or path of the lab to operate in.") +@click.pass_context +def workcell( + ctx: Context, + name: Optional[str], + path: Optional[str], + lab: Optional[str], +) -> None: + """Manage workcells. Specify workcell by name or path.""" + lab_context = find_lab(name=lab, path=lab) + ctx.obj = find_workcell(name=name, path=path, lab_context=lab_context) + ctx.obj.quiet = ctx.parent.params.get("quiet") + + +@workcell.command() +@click.option("--name", "-n", type=str, help="The name of the workcell.") +@click.option( + "--path", + "-p", + type=str, + help="The path to the workcell definition file.", +) +@click.option("--description", "-d", type=str, help="The description of the workcell.") +@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.") +@click.pass_context +def create( + ctx: Context, + name: Optional[str], + path: Optional[str], + description: Optional[str], + yes: bool, +) -> None: + """Create a new workcell.""" + name = name if name else ctx.parent.params.get("name") + name = ( + name + if name + else prompt_for_input("Workcell Name", required=True, quiet=ctx.obj.quiet) + ) + description = ( + description + if description + else prompt_for_input("Workcell Description", quiet=ctx.obj.quiet) + ) + + workcell = WorkcellDefinition(name=name, description=description) + console.print(workcell) + + path = path if path else ctx.parent.params.get("path") + if not path: + if ctx.obj.lab and ctx.obj.lab.path: + # If we have a lab context, create in the lab directory + path = ( + 
ctx.obj.lab.path.parent + / "workcells" + / f"{to_snake_case(name)}.workcell.yaml" + ) + else: + current_path = Path.cwd() + if current_path.name == "workcells": + path = current_path / f"{to_snake_case(name)}.workcell.yaml" + else: + path = ( + current_path / "workcells" / f"{to_snake_case(name)}.workcell.yaml" + ) + + new_path = prompt_for_input( + "Path to save Workcell Definition file", + default=path, + quiet=ctx.obj.quiet, + ) + if new_path: + path = Path(new_path) + else: + path = Path(path) + + if not path.exists(): + path.parent.mkdir(parents=True, exist_ok=True) + save_model(path, workcell, overwrite_check=not ctx.obj.quiet and not yes) + + if ( + ctx.obj.lab + and ctx.obj.lab.lab_def + and ( + yes + or ctx.obj.quiet + or prompt_yes_no( + f"Add workcell to lab [bold]{ctx.obj.lab.lab_def.name}[/] ([italic]{ctx.obj.lab.path}[/])?", + default="yes", + ) + ) + ): + relative_path = path.relative_to(ctx.obj.lab.path.parent) + if name not in ctx.obj.lab.lab_def.workcells: + ctx.obj.lab.lab_def.workcells[name] = relative_path + save_model(ctx.obj.lab.path, ctx.obj.lab.lab_def, overwrite_check=False) + + +@workcell.command() +@pass_workcell +def list(ctx: WorkcellContext) -> None: + """List all workcells.""" + search_dir = ctx.lab.path.parent if ctx.lab and ctx.lab.path else None + workcell_files = search_for_file_pattern("*.workcell.yaml", start_dir=search_dir) + + if workcell_files: + for workcell_file in sorted(set(workcell_files)): + workcell = WorkcellDefinition.from_yaml(workcell_file) + console.print( + f"[bold]{workcell.name}[/]: {workcell.description} ({workcell_file})", + ) + else: + lab_context = " in lab directory" if ctx.lab and ctx.lab.path else "" + print( + f"No workcell definitions found{lab_context}, you can create one with 'madsci workcell create'", + ) + + +@workcell.command() +@pass_workcell +def info(ctx: WorkcellContext) -> None: + """Get information about a workcell.""" + if ctx.workcell: + pprint(ctx.workcell) + else: + print( + "No 
workcell specified/found, please specify a workcell with --name or --path, or create a new workcell with 'madsci workcell create'", + ) + + +@workcell.command() +@click.option("--yes", "-y", is_flag=True, help="Skip confirmation prompt.") +@click.option("--name", "-n", type=str, help="The name of the workcell.") +@click.option( + "--path", + "-p", + type=str, + help="The path to the workcell definition file.", +) +@pass_workcell +def delete( + ctx: WorkcellContext, + yes: bool, + name: Optional[str], + path: Optional[str], +) -> None: + """Delete a workcell.""" + if name or path: + ctx.workcell = find_workcell(name=name, path=None, lab_context=ctx.lab).workcell + if ctx.workcell and ctx.path: + console.print(f"Deleting workcell: {ctx.workcell.name} ({ctx.path})") + if yes or ctx.quiet or prompt_yes_no("Are you sure?", default="no"): + ctx.path.unlink() + console.print(f"Deleted {ctx.path}") + if ( + (ctx.lab and ctx.lab.lab_def and yes) + or ctx.quiet + or prompt_yes_no( + f"Remove from lab [bold]{ctx.lab.lab_def.name}[/] ([italic]{ctx.lab.path}[/])?", + default="yes", + ) + ) and ctx.workcell.name in ctx.lab.lab_def.workcells: + del ctx.lab.lab_def.workcells[ctx.workcell.name] + save_model(ctx.lab.path, ctx.lab.lab_def, overwrite_check=False) + else: + print( + "No workcell specified/found, please specify a workcell with --name or --path, or create a new workcell with 'madsci workcell create'", + ) + + +@workcell.command() +@pass_workcell +def validate(ctx: WorkcellContext) -> None: + """Validate a workcell definition file.""" + if ctx.workcell: + console.print(ctx.workcell) + return + console.print( + "No workcell specified, please specify a workcell with --name or --path, or create a new workcell with 'madsci workcell create'", + ) + return diff --git a/madsci/madsci_client/madsci/client/node/__init__.py b/madsci/madsci_client/madsci/client/node/__init__.py index db1d38e..c6fb537 100644 --- a/madsci/madsci_client/madsci/client/node/__init__.py +++ 
b/madsci/madsci_client/madsci/client/node/__init__.py @@ -1,14 +1,14 @@ -"""MADSci node client implementations.""" - -from madsci.client.node.abstract_node_client import AbstractNodeClient -from madsci.client.node.rest_node_client import RestNodeClient - -NODE_CLIENT_MAP = { - "rest_node_client": RestNodeClient, -} - -__all__ = [ - "NODE_CLIENT_MAP", - "AbstractNodeClient", - "RestNodeClient", -] +"""MADSci node client implementations.""" + +from madsci.client.node.abstract_node_client import AbstractNodeClient +from madsci.client.node.rest_node_client import RestNodeClient + +NODE_CLIENT_MAP = { + "rest_node_client": RestNodeClient, +} + +__all__ = [ + "NODE_CLIENT_MAP", + "AbstractNodeClient", + "RestNodeClient", +] diff --git a/madsci/madsci_client/madsci/client/node/abstract_node_client.py b/madsci/madsci_client/madsci/client/node/abstract_node_client.py index e5880e5..fb3f566 100644 --- a/madsci/madsci_client/madsci/client/node/abstract_node_client.py +++ b/madsci/madsci_client/madsci/client/node/abstract_node_client.py @@ -1,79 +1,79 @@ -"""Base node client implementation.""" - -from typing import Any, ClassVar - -from madsci.common.types.action_types import ( - ActionRequest, - ActionResult, -) -from madsci.common.types.admin_command_types import AdminCommandResponse -from madsci.common.types.event_types import Event -from madsci.common.types.module_types import ( - AdminCommands, - NodeClientCapabilities, -) -from madsci.common.types.node_types import ( - Node, - NodeInfo, - NodeSetConfigResponse, - NodeStatus, -) -from madsci.common.types.resource_types import ResourceDefinition - - -class AbstractNodeClient: - """Base Node Client, protocol agnostic, all node clients should inherit from or be based on this.""" - - url_protocols: ClassVar[list[str]] = [] - """The protocol(s) to use for node URL's using this client.""" - - supported_capabilities: ClassVar[NodeClientCapabilities] = NodeClientCapabilities() - """The capabilities supported by this node 
client.""" - - def __init__(self, node: Node) -> "AbstractNodeClient": - """Initialize the client.""" - self.node = node - - def send_action(self, action_request: ActionRequest) -> ActionResult: - """Perform an action on the node.""" - raise NotImplementedError("send_action not implemented by this client") - - def get_action_history(self) -> list[str]: - """Get a list of the action IDs for actions that the node has recently performed.""" - raise NotImplementedError( - "get_action_history is not implemented by this client" - ) - - def get_action_result(self, action_id: str) -> ActionResult: - """Get the status of an action on the node.""" - raise NotImplementedError("get_action_result is not implemented by this client") - - def get_status(self) -> NodeStatus: - """Get the status of the node.""" - raise NotImplementedError("get_status is not implemented by this client") - - def get_state(self) -> dict[str, Any]: - """Get the state of the node.""" - raise NotImplementedError("get_state is not implemented by this client") - - def get_info(self) -> NodeInfo: - """Get information about the node and module.""" - raise NotImplementedError("get_info is not implemented by this client") - - def set_config(self, config_dict: dict[str, Any]) -> NodeSetConfigResponse: - """Set configuration values of the node.""" - raise NotImplementedError("set_config is not implemented by this client") - - def send_admin_command(self, admin_command: AdminCommands) -> AdminCommandResponse: - """Perform an administrative command on the node.""" - raise NotImplementedError( - "send_admin_command is not implemented by this client" - ) - - def get_resources(self) -> dict[str, ResourceDefinition]: - """Get the resources of the node.""" - raise NotImplementedError("get_resources is not implemented by this client") - - def get_log(self) -> list[Event]: - """Get the log of the node.""" - raise NotImplementedError("get_log is not implemented by this client") +"""Base node client implementation.""" + 
+from typing import Any, ClassVar + +from madsci.common.types.action_types import ( + ActionRequest, + ActionResult, +) +from madsci.common.types.admin_command_types import AdminCommandResponse +from madsci.common.types.event_types import Event +from madsci.common.types.module_types import ( + AdminCommands, + NodeClientCapabilities, +) +from madsci.common.types.node_types import ( + Node, + NodeInfo, + NodeSetConfigResponse, + NodeStatus, +) +from madsci.common.types.resource_types import ResourceDefinition + + +class AbstractNodeClient: + """Base Node Client, protocol agnostic, all node clients should inherit from or be based on this.""" + + url_protocols: ClassVar[list[str]] = [] + """The protocol(s) to use for node URL's using this client.""" + + supported_capabilities: ClassVar[NodeClientCapabilities] = NodeClientCapabilities() + """The capabilities supported by this node client.""" + + def __init__(self, node: Node) -> "AbstractNodeClient": + """Initialize the client.""" + self.node = node + + def send_action(self, action_request: ActionRequest) -> ActionResult: + """Perform an action on the node.""" + raise NotImplementedError("send_action not implemented by this client") + + def get_action_history(self) -> list[str]: + """Get a list of the action IDs for actions that the node has recently performed.""" + raise NotImplementedError( + "get_action_history is not implemented by this client" + ) + + def get_action_result(self, action_id: str) -> ActionResult: + """Get the status of an action on the node.""" + raise NotImplementedError("get_action_result is not implemented by this client") + + def get_status(self) -> NodeStatus: + """Get the status of the node.""" + raise NotImplementedError("get_status is not implemented by this client") + + def get_state(self) -> dict[str, Any]: + """Get the state of the node.""" + raise NotImplementedError("get_state is not implemented by this client") + + def get_info(self) -> NodeInfo: + """Get information about the node and 
module.""" + raise NotImplementedError("get_info is not implemented by this client") + + def set_config(self, config_dict: dict[str, Any]) -> NodeSetConfigResponse: + """Set configuration values of the node.""" + raise NotImplementedError("set_config is not implemented by this client") + + def send_admin_command(self, admin_command: AdminCommands) -> AdminCommandResponse: + """Perform an administrative command on the node.""" + raise NotImplementedError( + "send_admin_command is not implemented by this client" + ) + + def get_resources(self) -> dict[str, ResourceDefinition]: + """Get the resources of the node.""" + raise NotImplementedError("get_resources is not implemented by this client") + + def get_log(self) -> list[Event]: + """Get the log of the node.""" + raise NotImplementedError("get_log is not implemented by this client") diff --git a/madsci/madsci_client/madsci/client/node/rest_node_client.py b/madsci/madsci_client/madsci/client/node/rest_node_client.py index cf17754..c70bd69 100644 --- a/madsci/madsci_client/madsci/client/node/rest_node_client.py +++ b/madsci/madsci_client/madsci/client/node/rest_node_client.py @@ -1,151 +1,151 @@ -"""REST-based node client implementation.""" - -import json -from pathlib import Path -from typing import Any, ClassVar - -import requests - -from madsci.client.node.abstract_node_client import ( - AbstractNodeClient, -) -from madsci.common.types.action_types import ActionRequest, ActionResult -from madsci.common.types.admin_command_types import AdminCommandResponse -from madsci.common.types.event_types import Event -from madsci.common.types.module_types import ( - AdminCommands, - NodeClientCapabilities, -) -from madsci.common.types.node_types import ( - Node, - NodeInfo, - NodeSetConfigResponse, - NodeStatus, -) -from madsci.common.types.resource_types import ResourceDefinition - - -class RestNodeClient(AbstractNodeClient): - """REST-based node client.""" - - url_protocols: ClassVar[list[str]] = ["http", "https"] - """The 
protocols supported by this client.""" - - supported_capabilities: NodeClientCapabilities = NodeClientCapabilities( - get_info=True, - get_state=True, - get_status=True, - send_action=True, - get_action_result=True, - get_action_history=True, - action_files=True, - send_admin_commands=True, - set_config=True, - get_resources=False, - ) - - def __init__(self, node: Node) -> "RestNodeClient": - """Initialize the client.""" - super().__init__(node) - - def send_action(self, action_request: ActionRequest) -> ActionResult: - """Perform an action on the node.""" - files = [] - try: - files = [ - ("files", (file, Path(path).open("rb"))) # noqa: SIM115 - for file, path in action_request.files.items() - ] - print(files) - - rest_response = requests.post( - f"{self.node.node_url}/action", - params={ - "action_name": action_request.action_name, - "args": json.dumps(action_request.args), - "action_id": action_request.action_id, - }, - files=files, - timeout=10, - ) - finally: - # * Ensure files are closed - for file in files: - file[1].close() - if not rest_response.ok: - rest_response.raise_for_status() - return ActionResult.model_validate(rest_response.json()) - - def get_action_history(self) -> list[str]: - """Get a list of the action IDs for actions that the node has recently performed.""" - response = requests.get(f"{self.node.node_url}/action", timeout=10) - if not response.ok: - response.raise_for_status() - return response.json() - - def get_action_result(self, action_id: str) -> ActionResult: - """Get the result of an action on the node.""" - response = requests.get( - f"{self.node.node_url}/action/{action_id}", - timeout=10, - ) - if not response.ok: - response.raise_for_status() - return ActionResult.model_validate(response.json()) - - def get_status(self) -> NodeStatus: - """Get the status of the node.""" - response = requests.get(f"{self.node.node_url}/status", timeout=10) - if not response.ok: - response.raise_for_status() - return 
NodeStatus.model_validate(response.json()) - - def get_state(self) -> dict[str, Any]: - """Get the state of the node.""" - response = requests.get(f"{self.node.node_url}/state", timeout=10) - if not response.ok: - response.raise_for_status() - return response.json() - - def get_info(self) -> NodeInfo: - """Get information about the node and module.""" - response = requests.get(f"{self.node.node_url}/info", timeout=10) - if not response.ok: - response.raise_for_status() - return NodeInfo.model_validate(response.json()) - - def set_config(self, config_dict: dict[str, Any]) -> NodeSetConfigResponse: - """Set configuration values of the node.""" - response = requests.post( - f"{self.node.node_url}/config", - json=config_dict, - timeout=60, - ) - if not response.ok: - response.raise_for_status() - return NodeSetConfigResponse.model_validate(response.json()) - - def send_admin_command(self, admin_command: AdminCommands) -> bool: - """Perform an administrative command on the node.""" - response = requests.post( - f"{self.node.node_url}/admin", - json={"admin_command": admin_command}, - timeout=10, - ) - if not response.ok: - response.raise_for_status() - return AdminCommandResponse.model_validate(response.json()) - - def get_resources(self) -> dict[str, ResourceDefinition]: - """Get the resources of the node.""" - raise NotImplementedError( - "get_resources is not implemented by this client", - ) - # TODO: Implement get_resources endpoint - - def get_log(self) -> list[Event]: - """Get the log from the node""" - response = requests.get(f"{self.node.node_url}/log", timeout=10) - if not response.ok: - response.raise_for_status() - return response.json() +"""REST-based node client implementation.""" + +import json +from pathlib import Path +from typing import Any, ClassVar + +import requests + +from madsci.client.node.abstract_node_client import ( + AbstractNodeClient, +) +from madsci.common.types.action_types import ActionRequest, ActionResult +from 
madsci.common.types.admin_command_types import AdminCommandResponse +from madsci.common.types.event_types import Event +from madsci.common.types.module_types import ( + AdminCommands, + NodeClientCapabilities, +) +from madsci.common.types.node_types import ( + Node, + NodeInfo, + NodeSetConfigResponse, + NodeStatus, +) +from madsci.common.types.resource_types import ResourceDefinition + + +class RestNodeClient(AbstractNodeClient): + """REST-based node client.""" + + url_protocols: ClassVar[list[str]] = ["http", "https"] + """The protocols supported by this client.""" + + supported_capabilities: NodeClientCapabilities = NodeClientCapabilities( + get_info=True, + get_state=True, + get_status=True, + send_action=True, + get_action_result=True, + get_action_history=True, + action_files=True, + send_admin_commands=True, + set_config=True, + get_resources=False, + ) + + def __init__(self, node: Node) -> "RestNodeClient": + """Initialize the client.""" + super().__init__(node) + + def send_action(self, action_request: ActionRequest) -> ActionResult: + """Perform an action on the node.""" + files = [] + try: + files = [ + ("files", (file, Path(path).open("rb"))) # noqa: SIM115 + for file, path in action_request.files.items() + ] + print(files) + + rest_response = requests.post( + f"{self.node.node_url}/action", + params={ + "action_name": action_request.action_name, + "args": json.dumps(action_request.args), + "action_id": action_request.action_id, + }, + files=files, + timeout=10, + ) + finally: + # * Ensure files are closed + for file in files: + file[1].close() + if not rest_response.ok: + rest_response.raise_for_status() + return ActionResult.model_validate(rest_response.json()) + + def get_action_history(self) -> list[str]: + """Get a list of the action IDs for actions that the node has recently performed.""" + response = requests.get(f"{self.node.node_url}/action", timeout=10) + if not response.ok: + response.raise_for_status() + return response.json() + + def 
get_action_result(self, action_id: str) -> ActionResult: + """Get the result of an action on the node.""" + response = requests.get( + f"{self.node.node_url}/action/{action_id}", + timeout=10, + ) + if not response.ok: + response.raise_for_status() + return ActionResult.model_validate(response.json()) + + def get_status(self) -> NodeStatus: + """Get the status of the node.""" + response = requests.get(f"{self.node.node_url}/status", timeout=10) + if not response.ok: + response.raise_for_status() + return NodeStatus.model_validate(response.json()) + + def get_state(self) -> dict[str, Any]: + """Get the state of the node.""" + response = requests.get(f"{self.node.node_url}/state", timeout=10) + if not response.ok: + response.raise_for_status() + return response.json() + + def get_info(self) -> NodeInfo: + """Get information about the node and module.""" + response = requests.get(f"{self.node.node_url}/info", timeout=10) + if not response.ok: + response.raise_for_status() + return NodeInfo.model_validate(response.json()) + + def set_config(self, config_dict: dict[str, Any]) -> NodeSetConfigResponse: + """Set configuration values of the node.""" + response = requests.post( + f"{self.node.node_url}/config", + json=config_dict, + timeout=60, + ) + if not response.ok: + response.raise_for_status() + return NodeSetConfigResponse.model_validate(response.json()) + + def send_admin_command(self, admin_command: AdminCommands) -> bool: + """Perform an administrative command on the node.""" + response = requests.post( + f"{self.node.node_url}/admin", + json={"admin_command": admin_command}, + timeout=10, + ) + if not response.ok: + response.raise_for_status() + return AdminCommandResponse.model_validate(response.json()) + + def get_resources(self) -> dict[str, ResourceDefinition]: + """Get the resources of the node.""" + raise NotImplementedError( + "get_resources is not implemented by this client", + ) + # TODO: Implement get_resources endpoint + + def get_log(self) -> 
list[Event]: + """Get the log from the node""" + response = requests.get(f"{self.node.node_url}/log", timeout=10) + if not response.ok: + response.raise_for_status() + return response.json() diff --git a/madsci/madsci_client/pyproject.toml b/madsci/madsci_client/pyproject.toml index e6464ca..28fc0f7 100644 --- a/madsci/madsci_client/pyproject.toml +++ b/madsci/madsci_client/pyproject.toml @@ -1,35 +1,35 @@ -[project] -name = "madsci.client" -dynamic = ["version"] -description = "The Modular Autonomous Discovery for Science (MADSci) Python Client and CLI." -authors = [ - {name = "Tobias Ginsburg", email = "tginsburg@anl.gov"}, - {name = "Ryan D. Lewis", email = "ryan.lewis@anl.gov"}, - {name = "Casey Stone", email = "cstone@anl.gov"}, - {name = "Doga Ozgulbas", email = "dozgulbas@anl.gov"}, -] -requires-python = ">=3.9.1" -readme = "README.md" -license = {text = "MIT"} -dependencies = [ - "madsci.common", - "click>=8.1.7", - "trogon>=0.6.0" -] - -[project.urls] -Homepage = "https://github.com/AD-SDL/MADSci" - -[project.scripts] -madsci = "madsci.client.cli:root_cli" - -###################### -# Build Info + Tools # -###################### - -[build-system] -requires = ["pdm-backend"] -build-backend = "pdm.backend" - -#[dependency-groups] -#dev = ["-e madsci-common @ file:///${PROJECT_ROOT}/../madsci_common"] +[project] +name = "madsci.client" +dynamic = ["version"] +description = "The Modular Autonomous Discovery for Science (MADSci) Python Client and CLI." +authors = [ + {name = "Tobias Ginsburg", email = "tginsburg@anl.gov"}, + {name = "Ryan D. 
Lewis", email = "ryan.lewis@anl.gov"}, + {name = "Casey Stone", email = "cstone@anl.gov"}, + {name = "Doga Ozgulbas", email = "dozgulbas@anl.gov"}, +] +requires-python = ">=3.9.1" +readme = "README.md" +license = {text = "MIT"} +dependencies = [ + "madsci.common", + "click>=8.1.7", + "trogon>=0.6.0" +] + +[project.urls] +Homepage = "https://github.com/AD-SDL/MADSci" + +[project.scripts] +madsci = "madsci.client.cli:root_cli" + +###################### +# Build Info + Tools # +###################### + +[build-system] +requires = ["pdm-backend"] +build-backend = "pdm.backend" + +#[dependency-groups] +#dev = ["-e madsci-common @ file:///${PROJECT_ROOT}/../madsci_common"] diff --git a/madsci/madsci_common/madsci/common/__init__.py b/madsci/madsci_common/madsci/common/__init__.py index 29d4850..e72cb7e 100644 --- a/madsci/madsci_common/madsci/common/__init__.py +++ b/madsci/madsci_common/madsci/common/__init__.py @@ -1 +1 @@ -"""Common code for the MADSci project.""" +"""Common code for the MADSci project.""" diff --git a/madsci/madsci_common/madsci/common/definition_loaders.py b/madsci/madsci_common/madsci/common/definition_loaders.py index dbd7e08..ecce8dc 100644 --- a/madsci/madsci_common/madsci/common/definition_loaders.py +++ b/madsci/madsci_common/madsci/common/definition_loaders.py @@ -1,140 +1,140 @@ -"""MADSci Configuration Loaders.""" - -import argparse -import json -from pathlib import Path -from typing import Any - -from dotenv import load_dotenv - -from madsci.common.types.base_types import BaseModel -from madsci.common.types.module_types import NodeModuleDefinition -from madsci.common.types.node_types import ( - NodeDefinition, - get_module_from_node_definition, -) -from madsci.common.types.squid_types import LabDefinition -from madsci.common.utils import search_for_file_pattern - - -def madsci_definition_loader( - model: type[BaseModel] = BaseModel, - definition_file_pattern: str = "*.yaml", - search_for_file: bool = True, -) -> BaseModel: - """MADSci 
Definition Loader. Supports loading from a definition file, environment variables, and command line arguments, in reverse order of priority (i.e. command line arguments override environment variables, which override definition file values).""" - - # Load environment variables from a .env file - load_dotenv() - - parser = argparse.ArgumentParser(description="MADSci Definition Loader") - parser.add_argument( - "--definition", - type=Path, - help="The path to the MADSci configuration file.", - ) - args, _ = parser.parse_known_args() - definition_file = args.definition - if not definition_file: - if not search_for_file: - raise ValueError( - "Definition file not specified, please specify a definition file using the --definition argument.", - ) - - # *Load from definition file - if search_for_file: - definition_files = search_for_file_pattern( - definition_file_pattern, - parents=True, - children=True, - ) - if not definition_files: - raise ValueError( - f"No definition files found matching pattern: {definition_file_pattern}. Please specify a valid configuration file path using the --definition argument.", - ) - - definition_file = definition_files[0] - - return model.from_yaml(definition_file) - - -def lab_definition_loader( - model: type[BaseModel] = LabDefinition, - definition_file_pattern: str = "*.lab.yaml", - **kwargs: Any, -) -> LabDefinition: - """Lab Definition Loader. Supports loading from a definition file, environment variables, and command line arguments, in reverse order of priority (i.e. 
command line arguments override environment variables, which override definition file values).""" - definition = madsci_definition_loader( - model=model, - definition_file_pattern=definition_file_pattern, - **kwargs, - ) - for field_name, field in definition.lab_config.model_fields.items(): - parser = argparse.ArgumentParser( - description=f"MADSci Lab Definition Loader for {field_name}", - ) - parser.add_argument( - f"--{field_name}", - type=str, - help=f"[{field.annotation}] {field.description}", - default=None, - ) - args, _ = parser.parse_known_args() - for field_name in definition.lab_config.model_fields: - if getattr(args, field_name) is not None: - setattr( - definition.lab_config, - field_name, - json.loads(getattr(args, field_name)), - ) - definition.model_validate(definition) - return definition - - -def node_definition_loader( - model: type[BaseModel] = NodeDefinition, - definition_file_pattern: str = "*.node.yaml", - **kwargs: Any, -) -> tuple[NodeDefinition, NodeModuleDefinition, dict[str, Any]]: - """Node Definition Loader. Supports loading from a definition file, environment variables, and command line arguments, in reverse order of priority (i.e. 
command line arguments override environment variables, which override definition file values).""" - - # * Load the node definition file - node_definition = madsci_definition_loader( - model=model, - definition_file_pattern=definition_file_pattern, - **kwargs, - ) - - module_definition = get_module_from_node_definition(node_definition) - - combined_config = node_definition.config.copy() - - # * Import any module config from the module definition - for config_name, config in module_definition.config.items(): - # * Only add the config if it isn't already defined in the node definition - if config_name not in node_definition.config: - combined_config[config_name] = config - - # * Load the node configuration from the command line - parser = argparse.ArgumentParser(description="MADSci Node Definition Loader") - for field_name, field in combined_config.items(): - parser.add_argument( - f"--{field_name}", - type=str, - help=field.description, - default=field.default, - required=False, - ) - args, _ = parser.parse_known_args() - config_values = {} - for arg_name, arg_value in vars(args).items(): - if arg_value is not None: - try: - config_values[arg_name] = json.loads(str(arg_value)) - except json.JSONDecodeError: - config_values[arg_name] = arg_value - else: - config_values[arg_name] = field.default - - # * Return the node and module definitions - return node_definition, module_definition, config_values +"""MADSci Configuration Loaders.""" + +import argparse +import json +from pathlib import Path +from typing import Any + +from dotenv import load_dotenv + +from madsci.common.types.base_types import BaseModel +from madsci.common.types.module_types import NodeModuleDefinition +from madsci.common.types.node_types import ( + NodeDefinition, + get_module_from_node_definition, +) +from madsci.common.types.squid_types import LabDefinition +from madsci.common.utils import search_for_file_pattern + + +def madsci_definition_loader( + model: type[BaseModel] = BaseModel, + 
definition_file_pattern: str = "*.yaml", + search_for_file: bool = True, +) -> BaseModel: + """MADSci Definition Loader. Supports loading from a definition file, environment variables, and command line arguments, in reverse order of priority (i.e. command line arguments override environment variables, which override definition file values).""" + + # Load environment variables from a .env file + load_dotenv() + + parser = argparse.ArgumentParser(description="MADSci Definition Loader") + parser.add_argument( + "--definition", + type=Path, + help="The path to the MADSci configuration file.", + ) + args, _ = parser.parse_known_args() + definition_file = args.definition + if not definition_file: + if not search_for_file: + raise ValueError( + "Definition file not specified, please specify a definition file using the --definition argument.", + ) + + # *Load from definition file + if search_for_file: + definition_files = search_for_file_pattern( + definition_file_pattern, + parents=True, + children=True, + ) + if not definition_files: + raise ValueError( + f"No definition files found matching pattern: {definition_file_pattern}. Please specify a valid configuration file path using the --definition argument.", + ) + + definition_file = definition_files[0] + + return model.from_yaml(definition_file) + + +def lab_definition_loader( + model: type[BaseModel] = LabDefinition, + definition_file_pattern: str = "*.lab.yaml", + **kwargs: Any, +) -> LabDefinition: + """Lab Definition Loader. Supports loading from a definition file, environment variables, and command line arguments, in reverse order of priority (i.e. 
command line arguments override environment variables, which override definition file values).""" + definition = madsci_definition_loader( + model=model, + definition_file_pattern=definition_file_pattern, + **kwargs, + ) + for field_name, field in definition.lab_config.model_fields.items(): + parser = argparse.ArgumentParser( + description=f"MADSci Lab Definition Loader for {field_name}", + ) + parser.add_argument( + f"--{field_name}", + type=str, + help=f"[{field.annotation}] {field.description}", + default=None, + ) + args, _ = parser.parse_known_args() + for field_name in definition.lab_config.model_fields: + if getattr(args, field_name) is not None: + setattr( + definition.lab_config, + field_name, + json.loads(getattr(args, field_name)), + ) + definition.model_validate(definition) + return definition + + +def node_definition_loader( + model: type[BaseModel] = NodeDefinition, + definition_file_pattern: str = "*.node.yaml", + **kwargs: Any, +) -> tuple[NodeDefinition, NodeModuleDefinition, dict[str, Any]]: + """Node Definition Loader. Supports loading from a definition file, environment variables, and command line arguments, in reverse order of priority (i.e. 
command line arguments override environment variables, which override definition file values).""" + + # * Load the node definition file + node_definition = madsci_definition_loader( + model=model, + definition_file_pattern=definition_file_pattern, + **kwargs, + ) + + module_definition = get_module_from_node_definition(node_definition) + + combined_config = node_definition.config.copy() + + # * Import any module config from the module definition + for config_name, config in module_definition.config.items(): + # * Only add the config if it isn't already defined in the node definition + if config_name not in node_definition.config: + combined_config[config_name] = config + + # * Load the node configuration from the command line + parser = argparse.ArgumentParser(description="MADSci Node Definition Loader") + for field_name, field in combined_config.items(): + parser.add_argument( + f"--{field_name}", + type=str, + help=field.description, + default=field.default, + required=False, + ) + args, _ = parser.parse_known_args() + config_values = {} + for arg_name, arg_value in vars(args).items(): + if arg_value is not None: + try: + config_values[arg_name] = json.loads(str(arg_value)) + except json.JSONDecodeError: + config_values[arg_name] = arg_value + else: + config_values[arg_name] = field.default + + # * Return the node and module definitions + return node_definition, module_definition, config_values diff --git a/madsci/madsci_common/madsci/common/events.py b/madsci/madsci_common/madsci/common/events.py index 6e8b131..f53f89f 100644 --- a/madsci/madsci_common/madsci/common/events.py +++ b/madsci/madsci_common/madsci/common/events.py @@ -1,61 +1,61 @@ -"""MADSci Event Handling.""" - -import logging -from typing import Optional - -from madsci.common.types.event_types import Event - - -class MADSciEventLogger: - """A logger for MADSci events.""" - - def __init__( - self, - name: Optional[str] = None, - log_level: int = logging.INFO, - event_server: Optional[str] = None, - 
) -> None: - """Initialize the event logger.""" - if name: - self.logger = logging.getLogger(__name__ + "." + name) - else: - self.logger = logging.getLogger(__name__) - self.logger.setLevel(log_level) - self.event_server = event_server - - def get_log(self) -> list[Event]: - """Read the log""" - # TODO: Read logs - - def log(self, event: Event, level: Optional[int] = None) -> None: - """Log an event.""" - event.log_level = level if level else event.log_level - logging.log(event.log_level, event.event_data) - if self.event_server: - self.send_event(event) - - def log_debug(self, event: Event) -> None: - """Log an event at the debug level.""" - self.log(event, logging.DEBUG) - - def log_info(self, event: Event) -> None: - """Log an event at the info level.""" - self.log(event, logging.INFO) - - def log_warning(self, event: Event) -> None: - """Log an event at the warning level.""" - self.log(event, logging.WARNING) - - def log_error(self, event: Event) -> None: - """Log an event at the error level.""" - self.log(event, logging.ERROR) - - def log_critical(self, event: Event) -> None: - """Log an event at the critical level.""" - self.log(event, logging.CRITICAL) - - def send_event(self, event: Event) -> None: - """Send an event to the event manager.""" - - -default_event_logger = MADSciEventLogger() +"""MADSci Event Handling.""" + +import logging +from typing import Optional + +from madsci.common.types.event_types import Event + + +class MADSciEventLogger: + """A logger for MADSci events.""" + + def __init__( + self, + name: Optional[str] = None, + log_level: int = logging.INFO, + event_server: Optional[str] = None, + ) -> None: + """Initialize the event logger.""" + if name: + self.logger = logging.getLogger(__name__ + "." 
+ name) + else: + self.logger = logging.getLogger(__name__) + self.logger.setLevel(log_level) + self.event_server = event_server + + def get_log(self) -> list[Event]: + """Read the log""" + # TODO: Read logs + + def log(self, event: Event, level: Optional[int] = None) -> None: + """Log an event.""" + event.log_level = level if level else event.log_level + logging.log(event.log_level, event.event_data) + if self.event_server: + self.send_event(event) + + def log_debug(self, event: Event) -> None: + """Log an event at the debug level.""" + self.log(event, logging.DEBUG) + + def log_info(self, event: Event) -> None: + """Log an event at the info level.""" + self.log(event, logging.INFO) + + def log_warning(self, event: Event) -> None: + """Log an event at the warning level.""" + self.log(event, logging.WARNING) + + def log_error(self, event: Event) -> None: + """Log an event at the error level.""" + self.log(event, logging.ERROR) + + def log_critical(self, event: Event) -> None: + """Log an event at the critical level.""" + self.log(event, logging.CRITICAL) + + def send_event(self, event: Event) -> None: + """Send an event to the event manager.""" + + +default_event_logger = MADSciEventLogger() diff --git a/madsci/madsci_common/madsci/common/exceptions.py b/madsci/madsci_common/madsci/common/exceptions.py index e76e52c..382d5c2 100644 --- a/madsci/madsci_common/madsci/common/exceptions.py +++ b/madsci/madsci_common/madsci/common/exceptions.py @@ -1,13 +1,13 @@ -"""Exceptions common across the MADSci Framework""" - - -class ActionMissingArgumentError(ValueError): - """An action was requested with a missing argument""" - - -class ActionMissingFileError(ValueError): - """An action was requested with a missing file argument""" - - -class ActionNotImplementedError(ValueError): - """An action was requested, but isn't implemented by the node""" +"""Exceptions common across the MADSci Framework""" + + +class ActionMissingArgumentError(ValueError): + """An action was requested 
with a missing argument""" + + +class ActionMissingFileError(ValueError): + """An action was requested with a missing file argument""" + + +class ActionNotImplementedError(ValueError): + """An action was requested, but isn't implemented by the node""" diff --git a/madsci/madsci_common/madsci/common/types/__init__.py b/madsci/madsci_common/madsci/common/types/__init__.py index d7f2bdf..42e7d9f 100644 --- a/madsci/madsci_common/madsci/common/types/__init__.py +++ b/madsci/madsci_common/madsci/common/types/__init__.py @@ -1 +1 @@ -"""Common Types for the MADSci Framework.""" +"""Common Types for the MADSci Framework.""" diff --git a/madsci/madsci_common/madsci/common/types/action_types.py b/madsci/madsci_common/madsci/common/types/action_types.py index 87f3b31..56d5631 100644 --- a/madsci/madsci_common/madsci/common/types/action_types.py +++ b/madsci/madsci_common/madsci/common/types/action_types.py @@ -1,351 +1,351 @@ -"""Types for MADSci Actions.""" - -import json -from enum import Enum -from typing import Any, Literal, Optional, Union - -from pydantic.functional_validators import field_validator, model_validator -from sqlmodel.main import Field - -from madsci.common.types.base_types import BaseModel, Error, PathLike, new_ulid_str - - -class ActionStatus(str, Enum): - """Status for a step of a workflow""" - - NOT_STARTED = "not_started" - NOT_READY = "not_ready" - RUNNING = "running" - SUCCEEDED = "succeeded" - FAILED = "failed" - CANCELLED = "cancelled" - PAUSED = "paused" - - -class ActionRequest(BaseModel): - """Request to perform an action on a module""" - - action_id: str = Field( - title="Action ID", - description="The ID of the action.", - default_factory=new_ulid_str, - ) - action_name: str = Field( - title="Action Name", - description="The name of the action to perform.", - ) - """Name of the action to perform""" - args: Optional[dict[str, Any]] = Field( - title="Action Arguments", - description="Arguments for the action.", - default_factory=dict, - ) - 
"""Arguments for the action""" - files: dict[str, PathLike] = Field( - title="Action Files", - description="Files sent along with the action.", - default_factory=dict, - ) - """Files sent along with the action""" - - @field_validator("args", mode="before") - @classmethod - def validate_args(cls, v: Any) -> Any: - """Validate the args field of the action request. If it's a string, it's parsed as JSON.""" - if isinstance(v, str): - v = json.loads(v) - if v is None: - return {} - return v - - def failed( - self, - errors: Union[Error, list[Error], str] = [], - data: dict[str, Any] = {}, - files: dict[str, PathLike] = {}, - ) -> "ActionFailed": - """Create an ActionFailed response""" - # * Convert errors to a list of errors if they are a single error or a string - if isinstance(errors, str): - errors = [Error(message=errors)] - elif isinstance(errors, Error): - errors = [errors] - return ActionFailed( - action_id=self.action_id, - errors=errors, - data=data, - files=files, - ) - - def succeeded( - self, - data: dict[str, Any] = {}, - files: dict[str, PathLike] = {}, - errors: Union[Error, list[Error], str] = [], - ) -> "ActionSucceeded": - """Create an ActionSucceeded response""" - return ActionSucceeded( - action_id=self.action_id, - errors=errors, - data=data, - files=files, - ) - - def running( - self, - data: dict[str, Any] = {}, - files: dict[str, PathLike] = {}, - errors: Union[Error, list[Error], str] = [], - ) -> "ActionRunning": - """Create an ActionRunning response""" - return ActionRunning( - action_id=self.action_id, - errors=errors, - data=data, - files=files, - ) - - def not_ready( - self, - errors: Union[Error, list[Error], str] = [], - data: dict[str, Any] = {}, - files: dict[str, PathLike] = {}, - ) -> "ActionNotReady": - """Create an ActionNotReady response""" - return ActionNotReady( - action_id=self.action_id, - errors=errors, - data=data, - files=files, - ) - - def cancelled( - self, - errors: Union[Error, list[Error], str] = [], - data: dict[str, 
Any] = {}, - files: dict[str, PathLike] = {}, - ) -> "ActionCancelled": - """Create an ActionCancelled response""" - return ActionCancelled( - action_id=self.action_id, - errors=errors, - data=data, - files=files, - ) - - -class ActionResult(BaseModel): - """Result of an action.""" - - action_id: str = Field( - title="Action ID", - description="The ID of the action.", - default_factory=new_ulid_str, - ) - status: ActionStatus = Field( - title="Step Status", - description="The status of the step.", - ) - errors: list[Error] = Field( - title="Step Error", - description="An error message(s) if the step failed.", - default=list, - ) - data: dict[str, Any] = Field( - title="Step Result", - description="The result of the step.", - default_factory=dict, - ) - files: dict[str, PathLike] = Field( - title="Step Files", - description="A dictionary of files produced by the step.", - default_factory=dict, - ) - - -class ActionSucceeded(ActionResult): - """Response from an action that succeeded.""" - - status: Literal[ActionStatus.SUCCEEDED] = ActionStatus.SUCCEEDED - - -class ActionFailed(ActionResult): - """Response from an action that failed.""" - - status: Literal[ActionStatus.FAILED] = ActionStatus.FAILED - - -class ActionCancelled(ActionResult): - """Response from an action that was cancelled.""" - - status: Literal[ActionStatus.CANCELLED] = ActionStatus.CANCELLED - - -class ActionRunning(ActionResult): - """Response from an action that is running.""" - - status: Literal[ActionStatus.RUNNING] = ActionStatus.RUNNING - - -class ActionNotReady(ActionResult): - """Response from an action that is not ready to be run.""" - - status: Literal[ActionStatus.NOT_READY] = ActionStatus.NOT_READY - - -class ActionDefinition(BaseModel): - """Definition of an action.""" - - name: str = Field( - title="Action Name", - description="The name of the action.", - ) - description: str = Field( - title="Action Description", - description="A description of the action.", - ) - args: Union[ - 
dict[str, "ActionArgumentDefinition"], - list["ActionArgumentDefinition"], - ] = Field( - title="Action Arguments", - description="The arguments of the action.", - default_factory=dict, - ) - files: Union[dict[str, PathLike], list[PathLike]] = Field( - title="Action File Arguments", - description="The file arguments of the action.", - default_factory=dict, - ) - results: Union[ - dict[str, "ActionResultDefinition"], - list["ActionResultDefinition"], - ] = Field( - title="Action Results", - description="The results of the action.", - default_factory=dict, - ) - blocking: bool = Field( - title="Blocking", - description="Whether the action is blocking.", - default=False, - ) - - @field_validator("args", mode="after") - @classmethod - def ensure_args_are_dict(cls, v: Any) -> Any: - """Ensure that the args are a dictionary""" - if isinstance(v, list): - return {arg.name: arg for arg in v} - return v - - @field_validator("files", mode="after") - @classmethod - def ensure_files_are_dict(cls, v: Any) -> Any: - """Ensure that the files are a dictionary""" - if isinstance(v, list): - return {file.name: file for file in v} - return v - - @field_validator("results", mode="after") - @classmethod - def ensure_results_are_dict(cls, v: Any) -> Any: - """Ensure that the results are a dictionary""" - if isinstance(v, list): - return {result.result_label: result for result in v} - return v - - @model_validator(mode="after") - @classmethod - def ensure_name_uniqueness(cls, v: Any) -> Any: - """Ensure that the names of the arguments and files are unique""" - names = set() - for arg in v.args.values(): - if arg.name in names: - raise ValueError(f"Action name '{arg.name}' is not unique") - names.add(arg.name) - for file in v.files.values(): - if file.name in names: - raise ValueError(f"File name '{file.name}' is not unique") - names.add(file.name) - return v - - -class ActionArgumentDefinition(BaseModel): - """Defines an argument for a module action""" - - name: str = Field( - 
title="Argument Name", - description="The name of the argument.", - ) - description: str = Field( - title="Argument Description", - description="A description of the argument.", - ) - type: str = Field( - title="Argument Type", description="Any type information about the argument" - ) - required: bool = Field( - title="Argument Required", - description="Whether the argument is required.", - ) - default: Optional[Any] = Field( - title="Argument Default", - description="The default value of the argument.", - default=None, - ) - - -class ActionFileDefinition(BaseModel): - """Defines a file for a module action""" - - name: str = Field( - title="File Name", - description="The name of the file.", - ) - required: bool = Field( - title="File Required", - description="Whether the file is required.", - ) - description: str = Field( - title="File Description", - description="A description of the file.", - ) - - -class ActionResultDefinition(BaseModel): - """Defines a result for a module action""" - - result_label: str = Field( - title="Result Label", - description="The label of the result.", - ) - description: str = Field( - title="Result Description", - description="A description of the result.", - default=None, - ) - result_type: str = Field( - title="Result Type", - description="The type of the result.", - ) - - -class FileActionResultDefinition(ActionResultDefinition): - """Defines a file result for a module action""" - - result_type: Literal["file"] = Field( - title="Result Type", - description="The type of the result.", - default="file", - ) - - -class JSONActionResultDefinition(ActionResultDefinition): - """Defines a JSON result for a module action""" - - result_type: Literal["json"] = Field( - title="Result Type", - description="The type of the result.", - default="json", - ) +"""Types for MADSci Actions.""" + +import json +from enum import Enum +from typing import Any, Literal, Optional, Union + +from pydantic.functional_validators import field_validator, 
from sqlmodel.main import Field

from madsci.common.types.base_types import BaseModel, Error, PathLike, new_ulid_str


class ActionStatus(str, Enum):
    """Status for a step of a workflow"""

    NOT_STARTED = "not_started"
    NOT_READY = "not_ready"
    RUNNING = "running"
    SUCCEEDED = "succeeded"
    FAILED = "failed"
    CANCELLED = "cancelled"
    PAUSED = "paused"


class ActionRequest(BaseModel):
    """Request to perform an action on a module"""

    # Unique identifier for this action request (ULID).
    action_id: str = Field(
        title="Action ID",
        description="The ID of the action.",
        default_factory=new_ulid_str,
    )
    # Name of the action to perform.
    action_name: str = Field(
        title="Action Name",
        description="The name of the action to perform.",
    )
    # Arguments for the action; JSON strings are parsed by validate_args.
    args: Optional[dict[str, Any]] = Field(
        title="Action Arguments",
        description="Arguments for the action.",
        default_factory=dict,
    )
    # Files sent along with the action, keyed by argument name.
    files: dict[str, PathLike] = Field(
        title="Action Files",
        description="Files sent along with the action.",
        default_factory=dict,
    )

    @field_validator("args", mode="before")
    @classmethod
    def validate_args(cls, v: Any) -> Any:
        """Validate the args field of the action request.
        If it's a string, it's parsed as JSON. None is coerced to an empty dict."""
        if isinstance(v, str):
            v = json.loads(v)
        if v is None:
            return {}
        return v

    @staticmethod
    def _coerce_errors(
        errors: Union[Error, list[Error], str, None],
    ) -> list[Error]:
        """Normalize a single Error, an error message string, or None into a
        list of Errors. Lists are passed through unchanged."""
        if errors is None:
            return []
        if isinstance(errors, str):
            return [Error(message=errors)]
        if isinstance(errors, Error):
            return [errors]
        return errors

    # NOTE: the response helpers below previously used mutable default
    # arguments (`= []`, `= {}`), which are shared across calls in Python.
    # They now default to None and are normalized per-call; the call-site
    # interface is unchanged.
    def failed(
        self,
        errors: Union[Error, list[Error], str, None] = None,
        data: Optional[dict[str, Any]] = None,
        files: Optional[dict[str, PathLike]] = None,
    ) -> "ActionFailed":
        """Create an ActionFailed response"""
        return ActionFailed(
            action_id=self.action_id,
            errors=self._coerce_errors(errors),
            data=data or {},
            files=files or {},
        )

    def succeeded(
        self,
        data: Optional[dict[str, Any]] = None,
        files: Optional[dict[str, PathLike]] = None,
        errors: Union[Error, list[Error], str, None] = None,
    ) -> "ActionSucceeded":
        """Create an ActionSucceeded response"""
        return ActionSucceeded(
            action_id=self.action_id,
            errors=self._coerce_errors(errors),
            data=data or {},
            files=files or {},
        )

    def running(
        self,
        data: Optional[dict[str, Any]] = None,
        files: Optional[dict[str, PathLike]] = None,
        errors: Union[Error, list[Error], str, None] = None,
    ) -> "ActionRunning":
        """Create an ActionRunning response"""
        return ActionRunning(
            action_id=self.action_id,
            errors=self._coerce_errors(errors),
            data=data or {},
            files=files or {},
        )

    def not_ready(
        self,
        errors: Union[Error, list[Error], str, None] = None,
        data: Optional[dict[str, Any]] = None,
        files: Optional[dict[str, PathLike]] = None,
    ) -> "ActionNotReady":
        """Create an ActionNotReady response"""
        return ActionNotReady(
            action_id=self.action_id,
            errors=self._coerce_errors(errors),
            data=data or {},
            files=files or {},
        )

    def cancelled(
        self,
        errors: Union[Error, list[Error], str, None] = None,
        data: Optional[dict[str, Any]] = None,
        files: Optional[dict[str, PathLike]] = None,
    ) -> "ActionCancelled":
        """Create an ActionCancelled response"""
        return ActionCancelled(
            action_id=self.action_id,
            errors=self._coerce_errors(errors),
            data=data or {},
            files=files or {},
        )


class ActionResult(BaseModel):
    """Result of an action."""

    action_id: str = Field(
        title="Action ID",
        description="The ID of the action.",
        default_factory=new_ulid_str,
    )
    status: ActionStatus = Field(
        title="Step Status",
        description="The status of the step.",
    )
    # BUG FIX: was `default=list`, which made the default value the `list`
    # *type* object rather than an empty list; default_factory gives each
    # instance its own fresh list.
    errors: list[Error] = Field(
        title="Step Error",
        description="An error message(s) if the step failed.",
        default_factory=list,
    )
    data: dict[str, Any] = Field(
        title="Step Result",
        description="The result of the step.",
        default_factory=dict,
    )
    files: dict[str, PathLike] = Field(
        title="Step Files",
        description="A dictionary of files produced by the step.",
        default_factory=dict,
    )


class ActionSucceeded(ActionResult):
    """Response from an action that succeeded."""

    status: Literal[ActionStatus.SUCCEEDED] = ActionStatus.SUCCEEDED


class ActionFailed(ActionResult):
    """Response from an action that failed."""

    status: Literal[ActionStatus.FAILED] = ActionStatus.FAILED


class ActionCancelled(ActionResult):
    """Response from an action that was cancelled."""

    status: Literal[ActionStatus.CANCELLED] = ActionStatus.CANCELLED


class ActionRunning(ActionResult):
    """Response from an action that is running."""

    status: Literal[ActionStatus.RUNNING] = ActionStatus.RUNNING


class ActionNotReady(ActionResult):
    """Response from an action that is not ready to be run."""

    status: Literal[ActionStatus.NOT_READY] = ActionStatus.NOT_READY


class ActionDefinition(BaseModel):
    """Definition of an action."""

    name: str = Field(
        title="Action Name",
        description="The name of the action.",
    )
    description: str = Field(
        title="Action Description",
        description="A description of the action.",
    )
    # Accepts either a list or dict of argument definitions; the validator
    # below promotes lists to name-keyed dicts.
    args: Union[
        dict[str, "ActionArgumentDefinition"],
        list["ActionArgumentDefinition"],
    ] = Field(
        title="Action Arguments",
        description="The arguments of the action.",
        default_factory=dict,
    )
    # NOTE(review): ensure_files_are_dict and ensure_name_uniqueness access
    # `.name` on each entry, which plain `str` paths do not have — this type
    # likely should be ActionFileDefinition. TODO confirm with callers before
    # changing the schema.
    files: Union[dict[str, PathLike], list[PathLike]] = Field(
        title="Action File Arguments",
        description="The file arguments of the action.",
        default_factory=dict,
    )
    results: Union[
        dict[str, "ActionResultDefinition"],
        list["ActionResultDefinition"],
    ] = Field(
        title="Action Results",
        description="The results of the action.",
        default_factory=dict,
    )
    blocking: bool = Field(
        title="Blocking",
        description="Whether the action is blocking.",
        default=False,
    )

    @field_validator("args", mode="after")
    @classmethod
    def ensure_args_are_dict(cls, v: Any) -> Any:
        """Ensure that the args are a dictionary"""
        if isinstance(v, list):
            return {arg.name: arg for arg in v}
        return v

    @field_validator("files", mode="after")
    @classmethod
    def ensure_files_are_dict(cls, v: Any) -> Any:
        """Ensure that the files are a dictionary"""
        if isinstance(v, list):
            return {file.name: file for file in v}
        return v

    @field_validator("results", mode="after")
    @classmethod
    def ensure_results_are_dict(cls, v: Any) -> Any:
        """Ensure that the results are a dictionary"""
        if isinstance(v, list):
            return {result.result_label: result for result in v}
        return v

    # FIX: an `after` model validator receives the validated model instance,
    # not the class; pydantic v2 documents the instance-method form used here
    # (the previous classmethod form relied on pydantic's compatibility
    # shim treating the first argument as the instance).
    @model_validator(mode="after")
    def ensure_name_uniqueness(self) -> "ActionDefinition":
        """Ensure that the names of the arguments and files are unique"""
        names = set()
        for arg in self.args.values():
            if arg.name in names:
                raise ValueError(f"Action name '{arg.name}' is not unique")
            names.add(arg.name)
        for file in self.files.values():
            if file.name in names:
                raise ValueError(f"File name '{file.name}' is not unique")
            names.add(file.name)
        return self


class ActionArgumentDefinition(BaseModel):
    """Defines an argument for a module action"""

    name: str = Field(
        title="Argument Name",
        description="The name of the argument.",
    )
    description: str = Field(
        title="Argument Description",
        description="A description of the argument.",
    )
    type: str = Field(
        title="Argument Type", description="Any type information about the argument"
    )
    required: bool = Field(
        title="Argument Required",
        description="Whether the argument is required.",
    )
    default: Optional[Any] = Field(
        title="Argument Default",
        description="The default value of the argument.",
        default=None,
    )


class ActionFileDefinition(BaseModel):
    """Defines a file for a module action"""

    name: str = Field(
        title="File Name",
        description="The name of the file.",
    )
    required: bool = Field(
        title="File Required",
        description="Whether the file is required.",
    )
    description: str = Field(
        title="File Description",
        description="A description of the file.",
    )


class ActionResultDefinition(BaseModel):
    """Defines a result for a module action"""

    result_label: str = Field(
        title="Result Label",
        description="The label of the result.",
    )
    # FIX: the field defaults to None, so it must be Optional — a bare `str`
    # annotation with default=None fails validation when omitted.
    description: Optional[str] = Field(
        title="Result Description",
        description="A description of the result.",
        default=None,
    )
    result_type: str = Field(
        title="Result Type",
        description="The type of the result.",
    )


class FileActionResultDefinition(ActionResultDefinition):
    """Defines a file result for a module action"""

    result_type: Literal["file"] = Field(
        title="Result Type",
        description="The type of the result.",
        default="file",
    )


class JSONActionResultDefinition(ActionResultDefinition):
    """Defines a JSON result for a module action"""

    result_type: Literal["json"] = Field(
        title="Result Type",
        description="The type of the result.",
        default="json",
    )
Module""" - - SAFETY_STOP = "safety_stop" - RESET = "reset" - PAUSE = "pause" - RESUME = "resume" - CANCEL = "cancel" - SHUTDOWN = "shutdown" - LOCK = "lock" - UNLOCK = "unlock" - - -class AdminCommandResponse(BaseModel): - """Response from an Admin Command""" - - success: bool = Field( - title="Admin Command Success", - description="Whether the admin command was successful.", - default=True, - ) - errors: list[Error] = Field( - title="Admin Command Errors", - description="A list of errors that occurred while executing the admin command.", - default_factory=list, - ) +"""Types for Admin Commands.""" + +from enum import Enum + +from sqlmodel.main import Field + +from madsci.common.types.base_types import BaseModel, Error + + +class AdminCommands(str, Enum): + """Valid Admin Commands to send to a Module""" + + SAFETY_STOP = "safety_stop" + RESET = "reset" + PAUSE = "pause" + RESUME = "resume" + CANCEL = "cancel" + SHUTDOWN = "shutdown" + LOCK = "lock" + UNLOCK = "unlock" + + +class AdminCommandResponse(BaseModel): + """Response from an Admin Command""" + + success: bool = Field( + title="Admin Command Success", + description="Whether the admin command was successful.", + default=True, + ) + errors: list[Error] = Field( + title="Admin Command Errors", + description="A list of errors that occurred while executing the admin command.", + default_factory=list, + ) diff --git a/madsci/madsci_common/madsci/common/types/auth_types.py b/madsci/madsci_common/madsci/common/types/auth_types.py index b6d9b04..57452d9 100644 --- a/madsci/madsci_common/madsci/common/types/auth_types.py +++ b/madsci/madsci_common/madsci/common/types/auth_types.py @@ -1,98 +1,98 @@ -"""Types related to authentication and ownership of MADSci objects.""" - -from typing import Optional - -from pydantic.functional_validators import field_validator -from sqlmodel.main import Field - -from madsci.common.types.base_types import BaseModel -from madsci.common.types.validators import ulid_validator - - -class 
OwnershipInfo(BaseModel): - """Information about the ownership of a MADSci object.""" - - auth_id: str = Field( - title="Auth ID", - description="The ID of the auth that owns the object.", - ) - - user_id: Optional[str] = Field( - title="User ID", - description="The ID of the user who owns the object.", - default=None, - ) - experiment_id: Optional[str] = Field( - title="Experiment ID", - description="The ID of the experiment that owns the object.", - default=None, - ) - campaign_id: Optional[str] = Field( - title="Campaign ID", - description="The ID of the campaign that owns the object.", - default=None, - ) - project_id: Optional[str] = Field( - title="Project ID", - description="The ID of the project that owns the object.", - default=None, - ) - node_id: Optional[str] = Field( - title="Node ID", - description="The ID of the node that owns the object.", - default=None, - ) - workcell_id: Optional[str] = Field( - title="Workcell ID", - description="The ID of the workcell that owns the object.", - default=None, - ) - lab_id: Optional[str] = Field( - title="Lab ID", - description="The ID of the lab that owns the object.", - default=None, - ) - - is_ulid = field_validator( - "user_id", - "experiment_id", - "campaign_id", - "project_id", - "node_id", - "workcell_id", - mode="after", - )(ulid_validator) - - -class UserInfo(BaseModel): - """Information about a user.""" - - user_id: str = Field(title="User ID", description="The ID of the user.") - user_name: str = Field(title="User Name", description="The name of the user.") - user_email: str = Field(title="User Email", description="The email of the user.") - - is_ulid = field_validator("user_id", mode="after")(ulid_validator) - - -class ProjectInfo(BaseModel): - """Information about a project.""" - - project_id: str = Field(title="Project ID", description="The ID of the project.") - project_name: str = Field( - title="Project Name", - description="The name of the project.", - ) - project_description: str = Field( - 
title="Project Description", - description="The description of the project.", - ) - project_owner: UserInfo = Field( - title="Project Owner", - description="The owner of the project.", - ) - project_members: list[UserInfo] = Field( - title="Project Members", - description="The members of the project.", - ) - - is_ulid = field_validator("project_id", mode="after")(ulid_validator) +"""Types related to authentication and ownership of MADSci objects.""" + +from typing import Optional + +from pydantic.functional_validators import field_validator +from sqlmodel.main import Field + +from madsci.common.types.base_types import BaseModel +from madsci.common.types.validators import ulid_validator + + +class OwnershipInfo(BaseModel): + """Information about the ownership of a MADSci object.""" + + auth_id: str = Field( + title="Auth ID", + description="The ID of the auth that owns the object.", + ) + + user_id: Optional[str] = Field( + title="User ID", + description="The ID of the user who owns the object.", + default=None, + ) + experiment_id: Optional[str] = Field( + title="Experiment ID", + description="The ID of the experiment that owns the object.", + default=None, + ) + campaign_id: Optional[str] = Field( + title="Campaign ID", + description="The ID of the campaign that owns the object.", + default=None, + ) + project_id: Optional[str] = Field( + title="Project ID", + description="The ID of the project that owns the object.", + default=None, + ) + node_id: Optional[str] = Field( + title="Node ID", + description="The ID of the node that owns the object.", + default=None, + ) + workcell_id: Optional[str] = Field( + title="Workcell ID", + description="The ID of the workcell that owns the object.", + default=None, + ) + lab_id: Optional[str] = Field( + title="Lab ID", + description="The ID of the lab that owns the object.", + default=None, + ) + + is_ulid = field_validator( + "user_id", + "experiment_id", + "campaign_id", + "project_id", + "node_id", + "workcell_id", + 
mode="after", + )(ulid_validator) + + +class UserInfo(BaseModel): + """Information about a user.""" + + user_id: str = Field(title="User ID", description="The ID of the user.") + user_name: str = Field(title="User Name", description="The name of the user.") + user_email: str = Field(title="User Email", description="The email of the user.") + + is_ulid = field_validator("user_id", mode="after")(ulid_validator) + + +class ProjectInfo(BaseModel): + """Information about a project.""" + + project_id: str = Field(title="Project ID", description="The ID of the project.") + project_name: str = Field( + title="Project Name", + description="The name of the project.", + ) + project_description: str = Field( + title="Project Description", + description="The description of the project.", + ) + project_owner: UserInfo = Field( + title="Project Owner", + description="The owner of the project.", + ) + project_members: list[UserInfo] = Field( + title="Project Members", + description="The members of the project.", + ) + + is_ulid = field_validator("project_id", mode="after")(ulid_validator) diff --git a/madsci/madsci_common/madsci/common/types/base_types.py b/madsci/madsci_common/madsci/common/types/base_types.py index a72ee0e..670a83e 100644 --- a/madsci/madsci_common/madsci/common/types/base_types.py +++ b/madsci/madsci_common/madsci/common/types/base_types.py @@ -1,90 +1,90 @@ -""" -Base types for MADSci. -""" - -import json -from datetime import datetime -from pathlib import Path -from typing import Any, Optional, TypeVar, Union - -import yaml -from pydantic.config import ConfigDict -from pydantic.fields import PrivateAttr -from sqlmodel import SQLModel -from sqlmodel.main import Field -from ulid import ULID - -_T = TypeVar("_T") - -PathLike = Union[str, Path] - - -def new_ulid_str() -> str: - """ - Generate a new ULID string. - """ - return str(ULID()) - - -class BaseModel(SQLModel, use_enum_values=True): - """ - Parent class for all MADSci data models. 
- """ - - _definition_path: Optional[PathLike] = PrivateAttr( - default=None, - ) - - model_config = ConfigDict( - validate_assignment=True, - ) - - def to_yaml(self, path: PathLike, **kwargs: Any) -> None: - """ - Allows all derived data models to be exported into yaml. - - kwargs are passed to model_dump_json - """ - with Path(path).open(mode="w") as fp: - yaml.dump( - json.loads(self.model_dump_json(**kwargs)), - fp, - indent=2, - sort_keys=False, - ) - - @classmethod - def from_yaml(cls: type[_T], path: PathLike) -> _T: - """ - Allows all derived data models to be loaded from yaml. - """ - with Path(path).open() as fp: - raw_data = yaml.safe_load(fp) - model_instance = cls.model_validate(raw_data) - model_instance._definition_path = path - return model_instance - - -class Error(BaseModel): - """A MADSci Error""" - - message: Optional[str] = Field( - title="Message", - description="The error message.", - default=None, - ) - logged_at: datetime = Field( - title="Logged At", - description="The timestamp of when the error was logged.", - default_factory=datetime.now, - ) - error_type: Optional[str] = Field( - title="Error Type", - description="The type of error.", - default=None, - ) - - @classmethod - def from_exception(cls, exception: Exception) -> "Error": - """Create an error from an exception.""" - return cls(message=str(exception), error_type=type(exception).__name__) +""" +Base types for MADSci. +""" + +import json +from datetime import datetime +from pathlib import Path +from typing import Any, Optional, TypeVar, Union + +import yaml +from pydantic.config import ConfigDict +from pydantic.fields import PrivateAttr +from sqlmodel import SQLModel +from sqlmodel.main import Field +from ulid import ULID + +_T = TypeVar("_T") + +PathLike = Union[str, Path] + + +def new_ulid_str() -> str: + """ + Generate a new ULID string. + """ + return str(ULID()) + + +class BaseModel(SQLModel, use_enum_values=True): + """ + Parent class for all MADSci data models. 
+ """ + + _definition_path: Optional[PathLike] = PrivateAttr( + default=None, + ) + + model_config = ConfigDict( + validate_assignment=True, + ) + + def to_yaml(self, path: PathLike, **kwargs: Any) -> None: + """ + Allows all derived data models to be exported into yaml. + + kwargs are passed to model_dump_json + """ + with Path(path).open(mode="w") as fp: + yaml.dump( + json.loads(self.model_dump_json(**kwargs)), + fp, + indent=2, + sort_keys=False, + ) + + @classmethod + def from_yaml(cls: type[_T], path: PathLike) -> _T: + """ + Allows all derived data models to be loaded from yaml. + """ + with Path(path).open() as fp: + raw_data = yaml.safe_load(fp) + model_instance = cls.model_validate(raw_data) + model_instance._definition_path = path + return model_instance + + +class Error(BaseModel): + """A MADSci Error""" + + message: Optional[str] = Field( + title="Message", + description="The error message.", + default=None, + ) + logged_at: datetime = Field( + title="Logged At", + description="The timestamp of when the error was logged.", + default_factory=datetime.now, + ) + error_type: Optional[str] = Field( + title="Error Type", + description="The type of error.", + default=None, + ) + + @classmethod + def from_exception(cls, exception: Exception) -> "Error": + """Create an error from an exception.""" + return cls(message=str(exception), error_type=type(exception).__name__) diff --git a/madsci/madsci_common/madsci/common/types/event_types.py b/madsci/madsci_common/madsci/common/types/event_types.py index 3f5e4b2..bb0a5ae 100644 --- a/madsci/madsci_common/madsci/common/types/event_types.py +++ b/madsci/madsci_common/madsci/common/types/event_types.py @@ -1,106 +1,106 @@ -""" -Event types for the MADSci system. 
-""" - -import logging -from datetime import datetime -from enum import Enum -from typing import Any, Optional - -from pydantic import ConfigDict -from pydantic.functional_validators import field_validator -from sqlmodel import Field - -from madsci.common.types.auth_types import OwnershipInfo -from madsci.common.types.base_types import BaseModel, new_ulid_str -from madsci.common.types.validators import ulid_validator - - -class Event(BaseModel): - """An event in the MADSci system.""" - - model_config = ConfigDict(extra="allow") - - event_id: str = Field( - title="Event ID", - description="The ID of the event.", - default_factory=new_ulid_str, - ) - event_type: "EventType" = Field( - title="Event Type", - description="The type of the event.", - default="unknown", - ) - log_level: "EventLogLevel" = Field( - title="Event Log Level", - description="The log level of the event. Defaults to NOTSET. See https://docs.python.org/3/library/logging.html#logging-levels", - default_factory=lambda: EventLogLevel.NOTSET, - ) - event_timestamp: datetime = Field( - title="Event Timestamp", - description="The timestamp of the event.", - default_factory=datetime.now, - ) - source: Optional[OwnershipInfo] = Field( - title="Source", - description="Information about the source of the event.", - default=None, - ) - event_data: Any = Field( - title="Event Data", - description="The data associated with the event.", - default_factory=dict, - ) - - is_ulid = field_validator("event_id", mode="after")(ulid_validator) - - -class EventLogLevel(int, Enum): - """The log level of an event.""" - - NOTSET = logging.NOTSET - DEBUG = logging.DEBUG - INFO = logging.INFO - WARNING = logging.WARNING - ERROR = logging.ERROR - CRITICAL = logging.CRITICAL - - -class EventType(str, Enum): - """The type of an event.""" - - UNKNOWN = "unknown" - # *Lab Events - LAB_CREATE = "lab_create" - LAB_START = "lab_start" - LAB_STOP = "lab_stop" - # *Node Events - NODE_CREATE = "node_create" - NODE_START = "node_start" - 
NODE_STOP = "node_stop" - NODE_CONFIG_UPDATE = "node_config_update" - NODE_STATUS_UPDATE = "node_status_update" - # *Workcell Events - WORKCELL_CREATE = "workcell_create" - WORKCELL_START = "workcell_start" - WORKCELL_STOP = "workcell_stop" - WORKCELL_CONFIG_UPDATE = "workcell_config_update" - WORKCELL_STATUS_UPDATE = "workcell_status_update" - # *Workflow Events - WORKFLOW_CREATE = "workflow_create" - WORKFLOW_START = "workflow_start" - WORKFLOW_COMPLETE = "workflow_complete" - WORKFLOW_ABORT = "workflow_abort" - # *Experiment Events - EXPERIMENT_CREATE = "experiment_create" - EXPERIMENT_START = "experiment_start" - EXPERIMENT_STOP = "experiment_stop" - EXPERIMENT_CONTINUED = "experiment_continued" - EXPERIMENT_PAUSE = "experiment_pause" - EXPERIMENT_COMPLETE = "experiment_complete" - EXPERIMENT_ABORT = "experiment_abort" - # *Campaign Events - CAMPAIGN_CREATE = "campaign_create" - CAMPAIGN_START = "campaign_start" - CAMPAIGN_COMPLETE = "campaign_complete" - CAMPAIGN_ABORT = "campaign_abort" +""" +Event types for the MADSci system. +""" + +import logging +from datetime import datetime +from enum import Enum +from typing import Any, Optional + +from pydantic import ConfigDict +from pydantic.functional_validators import field_validator +from sqlmodel import Field + +from madsci.common.types.auth_types import OwnershipInfo +from madsci.common.types.base_types import BaseModel, new_ulid_str +from madsci.common.types.validators import ulid_validator + + +class Event(BaseModel): + """An event in the MADSci system.""" + + model_config = ConfigDict(extra="allow") + + event_id: str = Field( + title="Event ID", + description="The ID of the event.", + default_factory=new_ulid_str, + ) + event_type: "EventType" = Field( + title="Event Type", + description="The type of the event.", + default="unknown", + ) + log_level: "EventLogLevel" = Field( + title="Event Log Level", + description="The log level of the event. Defaults to NOTSET. 
See https://docs.python.org/3/library/logging.html#logging-levels", + default_factory=lambda: EventLogLevel.NOTSET, + ) + event_timestamp: datetime = Field( + title="Event Timestamp", + description="The timestamp of the event.", + default_factory=datetime.now, + ) + source: Optional[OwnershipInfo] = Field( + title="Source", + description="Information about the source of the event.", + default=None, + ) + event_data: Any = Field( + title="Event Data", + description="The data associated with the event.", + default_factory=dict, + ) + + is_ulid = field_validator("event_id", mode="after")(ulid_validator) + + +class EventLogLevel(int, Enum): + """The log level of an event.""" + + NOTSET = logging.NOTSET + DEBUG = logging.DEBUG + INFO = logging.INFO + WARNING = logging.WARNING + ERROR = logging.ERROR + CRITICAL = logging.CRITICAL + + +class EventType(str, Enum): + """The type of an event.""" + + UNKNOWN = "unknown" + # *Lab Events + LAB_CREATE = "lab_create" + LAB_START = "lab_start" + LAB_STOP = "lab_stop" + # *Node Events + NODE_CREATE = "node_create" + NODE_START = "node_start" + NODE_STOP = "node_stop" + NODE_CONFIG_UPDATE = "node_config_update" + NODE_STATUS_UPDATE = "node_status_update" + # *Workcell Events + WORKCELL_CREATE = "workcell_create" + WORKCELL_START = "workcell_start" + WORKCELL_STOP = "workcell_stop" + WORKCELL_CONFIG_UPDATE = "workcell_config_update" + WORKCELL_STATUS_UPDATE = "workcell_status_update" + # *Workflow Events + WORKFLOW_CREATE = "workflow_create" + WORKFLOW_START = "workflow_start" + WORKFLOW_COMPLETE = "workflow_complete" + WORKFLOW_ABORT = "workflow_abort" + # *Experiment Events + EXPERIMENT_CREATE = "experiment_create" + EXPERIMENT_START = "experiment_start" + EXPERIMENT_STOP = "experiment_stop" + EXPERIMENT_CONTINUED = "experiment_continued" + EXPERIMENT_PAUSE = "experiment_pause" + EXPERIMENT_COMPLETE = "experiment_complete" + EXPERIMENT_ABORT = "experiment_abort" + # *Campaign Events + CAMPAIGN_CREATE = "campaign_create" + 
CAMPAIGN_START = "campaign_start" + CAMPAIGN_COMPLETE = "campaign_complete" + CAMPAIGN_ABORT = "campaign_abort" diff --git a/madsci/madsci_common/madsci/common/types/location_types.py b/madsci/madsci_common/madsci/common/types/location_types.py index d461f50..6575c32 100644 --- a/madsci/madsci_common/madsci/common/types/location_types.py +++ b/madsci/madsci_common/madsci/common/types/location_types.py @@ -1,64 +1,64 @@ -"""Location types for MADSci.""" - -from typing import Optional - -from pydantic import Field -from pydantic.functional_validators import field_validator -from pydantic.types import Json - -from madsci.common.types.base_types import BaseModel, new_ulid_str -from madsci.common.types.validators import ulid_validator - - -class Location(BaseModel): - """A location in the lab.""" - - location_name: str = Field( - title="Location Name", - description="The name of the location.", - ) - location_id: str = Field( - title="Location ID", - description="The ID of the location.", - default_factory=new_ulid_str, - ) - description: Optional[str] = Field( - title="Description", - description="A description of the location.", - default=None, - ) - poses: list["Pose"] = Field( - title="Poses", - description="A dictionary of poses representing the location. 
"""Location types for MADSci."""

from typing import Optional

from pydantic import Field
from pydantic.functional_validators import field_validator
from pydantic.types import Json

from madsci.common.types.base_types import BaseModel, new_ulid_str
from madsci.common.types.validators import ulid_validator


class Location(BaseModel):
    """A location in the lab."""

    location_name: str = Field(
        title="Location Name",
        description="The name of the location.",
    )
    location_id: str = Field(
        title="Location ID",
        description="The ID of the location.",
        default_factory=new_ulid_str,
    )
    description: Optional[str] = Field(
        title="Description",
        description="A description of the location.",
        default=None,
    )
    # NOTE(review): the description says "dictionary ... keys are node names"
    # but the type is a list of Pose (each Pose carries its node_id) — confirm
    # which is intended. default_factory replaces `default=[]` per the
    # documented pydantic idiom for mutable defaults.
    poses: list["Pose"] = Field(
        title="Poses",
        description="A dictionary of poses representing the location. Keys are node names.",
        default_factory=list,
    )
    resource_id: Optional[str] = Field(
        title="Resource ID",
        description="The resource ID linked to the location, typically a container ID.",
        default=None,
    )

    # BUG FIX: was field_validator("lab_id"), but Location has no lab_id
    # field — pydantic v2 raises a PydanticUserError at class-definition time
    # for validators that reference unknown fields. The ULID-format check
    # belongs on location_id.
    is_ulid = field_validator("location_id")(ulid_validator)


class Pose(BaseModel):
    """A pose for a location in the lab."""

    # The node this pose applies to.
    node_id: str = Field(title="Node ID", description="The ID of the node in the lab.")
    pose_id: str = Field(
        title="Pose ID",
        description="The ID of the pose.",
        default_factory=new_ulid_str,
    )
    pose_name: str = Field(title="Pose Name", description="The name of the pose.")
    pose_description: Optional[str] = Field(
        title="Pose Description",
        description="A description of the pose.",
        default=None,
    )
    pose_value: Json = Field(
        title="Pose Value",
        description="The value of the pose. Any JSON serializable object, representing the pose.",
    )

    is_ulid = field_validator("pose_id")(ulid_validator)
module_name: str = Field( - title="Node Module Name", - description="The name of the node module.", - ) - module_type: Optional[NodeType] = Field( - title="Module Type", - description="The type of the node module.", - default=None, - ) - module_description: Optional[str] = Field( - default=None, - title="Module Description", - description="A description of the node module.", - ) - capabilities: "NodeCapabilities" = Field( - default_factory=lambda: NodeCapabilities(), - title="Module Capabilities", - description="The capabilities of the node module.", - ) - config: Union[list["ConfigParameter"], dict[str, "ConfigParameter"]] = Field( - title="Module Configuration", - description="The configuration of the node module. These are 'default' configuration parameters inherited by all child nodes.", - default_factory=list, - ) - commands: dict[str, str] = Field( - title="Module Commands", - description="The commands that the node module supports. These are 'default' commands inherited by all child nodes.", - default_factory=dict, - ) - - @field_validator("config", mode="after") - @classmethod - def validate_config( - cls, - v: Union[list["ConfigParameter"], dict[str, "ConfigParameter"]], - ) -> Union[list["ConfigParameter"], dict[str, "ConfigParameter"]]: - """Validate the node module configuration, promoting a list of ConfigParameters to a dictionary for easier access.""" - if isinstance(v, dict): - return v - return {param.name: param for param in v} - - -class ConfigParameter(BaseModel, extra="allow"): - """A parameter for a MADSci Module/Node Configuration.""" - - name: str = Field( - title="Parameter Name", - description="The name of the parameter.", - ) - description: Optional[str] = Field( - title="Parameter Description", - description="A description of the parameter.", - default=None, - ) - default: Optional[Any] = Field( - title="Parameter Default", - description="The default value of the parameter.", - default=None, - ) - required: bool = Field( - 
title="Parameter Required", - description="Whether the parameter is required.", - default=False, - ) - reset_on_change: bool = Field( - title="Parameter Reset on Change", - description="Whether the node should restart whenever the parameter changes.", - default=True, - ) - - -NODE_MODULE_CONFIG_TEMPLATES: dict[str, list[ConfigParameter]] = { - "REST Module": [ - ConfigParameter( - name="host", - description="The host of the REST API.", - default="127.0.0.1", - required=True, - ), - ConfigParameter( - name="port", - description="The port of the REST API.", - default=8000, - required=True, - ), - ConfigParameter( - name="protocol", - description="The protocol of the REST API, either 'http' or 'https'.", - default="http", - required=True, - ), - ], -} - - -class NodeClientCapabilities(BaseModel): - """Capabilities of a MADSci Node Client.""" - - get_info: bool = Field( - default=False, - title="Module Info", - description="Whether the node supports querying its info.", - ) - get_state: bool = Field( - default=False, - title="Module State", - description="Whether the node supports querying its state.", - ) - get_status: bool = Field( - default=False, - title="Module Status", - description="Whether the node supports querying its status.", - ) - send_action: bool = Field( - default=False, - title="Module Send Action", - description="Whether the node supports sending actions.", - ) - get_action_result: bool = Field( - default=False, - title="Module Get Action", - description="Whether the node supports querying the status of an action.", - ) - get_action_history: bool = Field( - default=False, - title="Module Get Actions", - description="Whether the node supports querying the history of actions.", - ) - action_files: bool = Field( - default=False, - title="Module Action Files", - description="Whether the node supports sending action files.", - ) - send_admin_commands: bool = Field( - default=False, - title="Module Send Admin Commands", - description="Whether the node 
supports sending admin commands.", - ) - set_config: bool = Field( - default=False, - title="Module Set Config", - description="Whether the node supports setting configuration.", - ) - get_resources: bool = Field( - default=False, - title="Module Get Resources", - description="Whether the node supports querying its resources.", - ) - get_log: bool = Field( - default=False, - title="Module Get Log", - description="Whether the node supports querying its log.", - ) - - -class NodeCapabilities(NodeClientCapabilities): - """Capabilities of a MADSci Node.""" - - events: bool = Field( - default=False, - title="Module Events", - description="Whether the module supports raising MADSci events.", - ) - resources: bool = Field( - default=False, - title="Module Resources", - description="Whether the module supports MADSci-compatible resource management.", - ) - admin_commands: set[AdminCommands] = Field( - default=set(), - title="Module Admin Commands", - description="Which admin commands the module supports, if any.", - ) +"""Types related to MADSci Modules.""" + +from enum import Enum +from typing import Any, Optional, Union + +from pydantic.functional_validators import field_validator +from sqlmodel.main import Field + +from madsci.common.types.admin_command_types import AdminCommands +from madsci.common.types.base_types import BaseModel + + +class NodeType(str, Enum): + """The type of a MADSci node.""" + + DEVICE = "device" + COMPUTE = "compute" + RESOURCE_MANAGER = "resource_manager" + EVENT_MANAGER = "event_manager" + WORKCELL_MANAGER = "workcell_manager" + DATA_MANAGER = "data_manager" + TRANSFER_MANAGER = "transfer_manager" + + +class NodeModuleDefinition(BaseModel, extra="allow"): + """Definition for a MADSci Node Module.""" + + module_name: str = Field( + title="Node Module Name", + description="The name of the node module.", + ) + module_type: Optional[NodeType] = Field( + title="Module Type", + description="The type of the node module.", + default=None, + ) + 
module_description: Optional[str] = Field( + default=None, + title="Module Description", + description="A description of the node module.", + ) + capabilities: "NodeCapabilities" = Field( + default_factory=lambda: NodeCapabilities(), + title="Module Capabilities", + description="The capabilities of the node module.", + ) + config: Union[list["ConfigParameter"], dict[str, "ConfigParameter"]] = Field( + title="Module Configuration", + description="The configuration of the node module. These are 'default' configuration parameters inherited by all child nodes.", + default_factory=list, + ) + commands: dict[str, str] = Field( + title="Module Commands", + description="The commands that the node module supports. These are 'default' commands inherited by all child nodes.", + default_factory=dict, + ) + + @field_validator("config", mode="after") + @classmethod + def validate_config( + cls, + v: Union[list["ConfigParameter"], dict[str, "ConfigParameter"]], + ) -> Union[list["ConfigParameter"], dict[str, "ConfigParameter"]]: + """Validate the node module configuration, promoting a list of ConfigParameters to a dictionary for easier access.""" + if isinstance(v, dict): + return v + return {param.name: param for param in v} + + +class ConfigParameter(BaseModel, extra="allow"): + """A parameter for a MADSci Module/Node Configuration.""" + + name: str = Field( + title="Parameter Name", + description="The name of the parameter.", + ) + description: Optional[str] = Field( + title="Parameter Description", + description="A description of the parameter.", + default=None, + ) + default: Optional[Any] = Field( + title="Parameter Default", + description="The default value of the parameter.", + default=None, + ) + required: bool = Field( + title="Parameter Required", + description="Whether the parameter is required.", + default=False, + ) + reset_on_change: bool = Field( + title="Parameter Reset on Change", + description="Whether the node should restart whenever the parameter changes.", + 
default=True, + ) + + +NODE_MODULE_CONFIG_TEMPLATES: dict[str, list[ConfigParameter]] = { + "REST Module": [ + ConfigParameter( + name="host", + description="The host of the REST API.", + default="127.0.0.1", + required=True, + ), + ConfigParameter( + name="port", + description="The port of the REST API.", + default=8000, + required=True, + ), + ConfigParameter( + name="protocol", + description="The protocol of the REST API, either 'http' or 'https'.", + default="http", + required=True, + ), + ], +} + + +class NodeClientCapabilities(BaseModel): + """Capabilities of a MADSci Node Client.""" + + get_info: bool = Field( + default=False, + title="Module Info", + description="Whether the node supports querying its info.", + ) + get_state: bool = Field( + default=False, + title="Module State", + description="Whether the node supports querying its state.", + ) + get_status: bool = Field( + default=False, + title="Module Status", + description="Whether the node supports querying its status.", + ) + send_action: bool = Field( + default=False, + title="Module Send Action", + description="Whether the node supports sending actions.", + ) + get_action_result: bool = Field( + default=False, + title="Module Get Action", + description="Whether the node supports querying the status of an action.", + ) + get_action_history: bool = Field( + default=False, + title="Module Get Actions", + description="Whether the node supports querying the history of actions.", + ) + action_files: bool = Field( + default=False, + title="Module Action Files", + description="Whether the node supports sending action files.", + ) + send_admin_commands: bool = Field( + default=False, + title="Module Send Admin Commands", + description="Whether the node supports sending admin commands.", + ) + set_config: bool = Field( + default=False, + title="Module Set Config", + description="Whether the node supports setting configuration.", + ) + get_resources: bool = Field( + default=False, + title="Module Get 
Resources", + description="Whether the node supports querying its resources.", + ) + get_log: bool = Field( + default=False, + title="Module Get Log", + description="Whether the node supports querying its log.", + ) + + +class NodeCapabilities(NodeClientCapabilities): + """Capabilities of a MADSci Node.""" + + events: bool = Field( + default=False, + title="Module Events", + description="Whether the module supports raising MADSci events.", + ) + resources: bool = Field( + default=False, + title="Module Resources", + description="Whether the module supports MADSci-compatible resource management.", + ) + admin_commands: set[AdminCommands] = Field( + default=set(), + title="Module Admin Commands", + description="Which admin commands the module supports, if any.", + ) diff --git a/madsci/madsci_common/madsci/common/types/node_types.py b/madsci/madsci_common/madsci/common/types/node_types.py index f45afd6..6d253fc 100644 --- a/madsci/madsci_common/madsci/common/types/node_types.py +++ b/madsci/madsci_common/madsci/common/types/node_types.py @@ -1,270 +1,270 @@ -"""MADSci Node Types.""" - -from os import PathLike -from pathlib import Path -from typing import Any, Optional, Union - -from pydantic import Field -from pydantic.fields import computed_field -from pydantic.functional_validators import field_validator -from pydantic.networks import AnyUrl - -from madsci.common.types.action_types import ActionDefinition -from madsci.common.types.base_types import BaseModel, Error, new_ulid_str -from madsci.common.types.module_types import ConfigParameter, NodeModuleDefinition -from madsci.common.types.validators import ulid_validator - - -def get_module_from_node_definition( - node_definition: "NodeDefinition", -) -> Optional[NodeModuleDefinition]: - """Get the module definition from a node definition. 
- - Args: - node_definition: The node definition to get the module definition from - - Returns: - The module definition, or None if not found - - Raises: - ValueError: If the module definition path cannot be resolved - """ - if node_definition.module_definition is None: - return None - - # * If it's already a ModuleDefinition instance, return it - if isinstance(node_definition.module_definition, NodeModuleDefinition): - return node_definition.module_definition - - # * Otherwise treat it as a path - module_path = Path(str(node_definition.module_definition)) - - # * If path is relative, try to resolve it - if not module_path.is_absolute(): - # * First try relative to node definition path if set - if node_definition._definition_path: - resolved_path = Path(node_definition._definition_path).parent / module_path - if resolved_path.exists(): - return NodeModuleDefinition.from_yaml(resolved_path) - - # * Otherwise try relative to current working directory - cwd_path = Path.cwd() / module_path - if cwd_path.exists(): - return NodeModuleDefinition.from_yaml(cwd_path) - - raise ValueError( - f"Could not resolve module definition path '{module_path}'. 
" - f"Tried:\n" - f" - {resolved_path if node_definition._definition_path else 'No node definition path set'}\n" - f" - {cwd_path}", - ) - - # * For absolute paths, just try to load directly - if module_path.exists(): - return NodeModuleDefinition.from_yaml(module_path) - - raise ValueError(f"Module definition file not found at '{module_path}'") - - -class NodeDefinition(BaseModel): - """Definition of a MADSci Node, a unique instance of a MADSci Module.""" - - node_name: str = Field(title="Node Name", description="The name of the node.") - node_id: str = Field( - title="Node ID", - description="The ID of the node.", - default_factory=new_ulid_str, - ) - node_url: Optional[AnyUrl] = Field( - title="Node URL", - description="The URL used to communicate with the node.", - default=None, - ) - node_description: Optional[str] = Field( - title="Description", - description="A description of the node.", - default=None, - ) - module_definition: Optional[Union[NodeModuleDefinition, PathLike]] = Field( - title="Module", - description="Definition of the module that the node is an instance of.", - default=None, - ) # TODO: Add support for pointing to URL - config: Union[list[ConfigParameter], dict[str, ConfigParameter]] = Field( - title="Node Configuration", - description="The configuration for the node.", - default_factory=list, - ) - commands: dict[str, str] = Field( - default_factory=dict, - title="Commands", - description="Commands for operating the node.", - ) - - is_ulid = field_validator("node_id")(ulid_validator) - - @field_validator("config", mode="after") - @classmethod - def validate_config( - cls, - v: Union[list[ConfigParameter], dict[str, ConfigParameter]], - ) -> Union[list[ConfigParameter], dict[str, ConfigParameter]]: - """Validate the node configuration, promoting a list of ConfigParameters to a dictionary for easier access.""" - if isinstance(v, dict): - return v - return {param.name: param for param in v} - - -class Node(BaseModel, 
arbitrary_types_allowed=True): - """A runtime representation of a MADSci Node used in a Workcell.""" - - node_url: AnyUrl = Field( - title="Node URL", - description="The URL used to communicate with the module.", - ) - status: Optional["NodeStatus"] = Field( - default=None, - title="Module Status", - description="The status of the module. Set to None if the module does not support status reporting or the status is unknown (e.g. if it hasn't reported/responded to status requests).", - ) - info: Optional["NodeInfo"] = Field( - default=None, - title="Node Info", - description="Information about the node, provided by the node itself.", - ) - - -class NodeInfo(NodeDefinition, NodeModuleDefinition): - """Information about a MADSci Node.""" - - actions: dict[str, "ActionDefinition"] = Field( - title="Module Actions", - description="The actions that the module supports.", - default_factory=dict, - ) - config_values: dict[str, Any] = Field( - default_factory=dict, - title="Node Configuration", - description="The configuration of the node.", - ) - - @classmethod - def from_node_and_module( - cls, - node: NodeDefinition, - module: Optional[NodeModuleDefinition] = None, - config_values: Optional[dict[str, Any]] = None, - ) -> "NodeInfo": - """Create a NodeInfo from a NodeDefinition and a ModuleDefinition.""" - if module is None: - module = get_module_from_node_definition(node) - if config_values is None: - config_values = {} - # * Merge the node and module configs and commands, with the node config taking precedence - config = {**module.config, **node.config} - commands = {**module.commands, **node.commands} - return cls( - **node.model_dump(exclude={"config", "commands"}), - **module.model_dump(exclude={"config", "commands"}), - config=config, - commands=commands, - config_values=config_values, - ) - - -class NodeStatus(BaseModel): - """Status of a MADSci Node.""" - - busy: bool = Field( - default=False, - title="Node Busy", - description="Whether the node is currently at 
capacity, i.e. running the maximum number of actions allowed.", - ) - running_actions: set[str] = Field( - default_factory=set, - title="Running Action IDs", - description="The IDs of the actions that the node is currently running.", - ) - paused: bool = Field( - default=False, - title="Node Paused", - description="Whether the node is paused.", - ) - locked: bool = Field( - default=False, - title="Node Locked", - description="Whether the node is locked, preventing it from accepting any actions.", - ) - errored: bool = Field( - default=False, - title="Node Errored", - description="Whether the node is in an errored state.", - ) - errors: list[Error] = Field( - default_factory=list, - title="Node Errors", - description="A list of errors that the node has encountered.", - ) - initializing: bool = Field( - default=False, - title="Node Initializing", - description="Whether the node is currently initializing.", - ) - waiting_for_config: set[str] = Field( - default_factory=set, - title="Node Waiting for Configuration", - description="Set of configuration parameters that the node is waiting for.", - ) - config_values: dict[str, Any] = Field( - default_factory=dict, - title="Node Configuration Values", - description="The current configuration values of the node.", - ) - - @computed_field - @property - def ready(self) -> bool: - """Whether the node is ready to accept actions.""" - ready = True - if self.busy: - ready = False - if self.locked: - ready = False - if self.errored: - ready = False - if self.initializing: - ready = False - if self.paused: - ready = False - if len(self.waiting_for_config) > 0: - ready = False - return ready - - @computed_field - @property - def description(self) -> str: - """A description of the node's status.""" - reasons = [] - if self.busy: - reasons.append("Node is busy") - if self.locked: - reasons.append("Node is locked") - if self.errored: - reasons.append("Node is in an error state") - if self.initializing: - reasons.append("Node is 
initializing") - if self.paused: - reasons.append("Node is paused") - if len(self.waiting_for_config) > 0: - reasons.append( - f"Node is missing configuration values: {self.waiting_for_config}", - ) - if reasons: - return "; ".join(reasons) - return "Node is ready" - - -class NodeSetConfigResponse(BaseModel): - """Response from a Node Set Config Request""" - - success: bool +"""MADSci Node Types.""" + +from os import PathLike +from pathlib import Path +from typing import Any, Optional, Union + +from pydantic import Field +from pydantic.fields import computed_field +from pydantic.functional_validators import field_validator +from pydantic.networks import AnyUrl + +from madsci.common.types.action_types import ActionDefinition +from madsci.common.types.base_types import BaseModel, Error, new_ulid_str +from madsci.common.types.module_types import ConfigParameter, NodeModuleDefinition +from madsci.common.types.validators import ulid_validator + + +def get_module_from_node_definition( + node_definition: "NodeDefinition", +) -> Optional[NodeModuleDefinition]: + """Get the module definition from a node definition. 
+ + Args: + node_definition: The node definition to get the module definition from + + Returns: + The module definition, or None if not found + + Raises: + ValueError: If the module definition path cannot be resolved + """ + if node_definition.module_definition is None: + return None + + # * If it's already a ModuleDefinition instance, return it + if isinstance(node_definition.module_definition, NodeModuleDefinition): + return node_definition.module_definition + + # * Otherwise treat it as a path + module_path = Path(str(node_definition.module_definition)) + + # * If path is relative, try to resolve it + if not module_path.is_absolute(): + # * First try relative to node definition path if set + if node_definition._definition_path: + resolved_path = Path(node_definition._definition_path).parent / module_path + if resolved_path.exists(): + return NodeModuleDefinition.from_yaml(resolved_path) + + # * Otherwise try relative to current working directory + cwd_path = Path.cwd() / module_path + if cwd_path.exists(): + return NodeModuleDefinition.from_yaml(cwd_path) + + raise ValueError( + f"Could not resolve module definition path '{module_path}'. 
" + f"Tried:\n" + f" - {resolved_path if node_definition._definition_path else 'No node definition path set'}\n" + f" - {cwd_path}", + ) + + # * For absolute paths, just try to load directly + if module_path.exists(): + return NodeModuleDefinition.from_yaml(module_path) + + raise ValueError(f"Module definition file not found at '{module_path}'") + + +class NodeDefinition(BaseModel): + """Definition of a MADSci Node, a unique instance of a MADSci Module.""" + + node_name: str = Field(title="Node Name", description="The name of the node.") + node_id: str = Field( + title="Node ID", + description="The ID of the node.", + default_factory=new_ulid_str, + ) + node_url: Optional[AnyUrl] = Field( + title="Node URL", + description="The URL used to communicate with the node.", + default=None, + ) + node_description: Optional[str] = Field( + title="Description", + description="A description of the node.", + default=None, + ) + module_definition: Optional[Union[NodeModuleDefinition, PathLike]] = Field( + title="Module", + description="Definition of the module that the node is an instance of.", + default=None, + ) # TODO: Add support for pointing to URL + config: Union[list[ConfigParameter], dict[str, ConfigParameter]] = Field( + title="Node Configuration", + description="The configuration for the node.", + default_factory=list, + ) + commands: dict[str, str] = Field( + default_factory=dict, + title="Commands", + description="Commands for operating the node.", + ) + + is_ulid = field_validator("node_id")(ulid_validator) + + @field_validator("config", mode="after") + @classmethod + def validate_config( + cls, + v: Union[list[ConfigParameter], dict[str, ConfigParameter]], + ) -> Union[list[ConfigParameter], dict[str, ConfigParameter]]: + """Validate the node configuration, promoting a list of ConfigParameters to a dictionary for easier access.""" + if isinstance(v, dict): + return v + return {param.name: param for param in v} + + +class Node(BaseModel, 
arbitrary_types_allowed=True): + """A runtime representation of a MADSci Node used in a Workcell.""" + + node_url: AnyUrl = Field( + title="Node URL", + description="The URL used to communicate with the module.", + ) + status: Optional["NodeStatus"] = Field( + default=None, + title="Module Status", + description="The status of the module. Set to None if the module does not support status reporting or the status is unknown (e.g. if it hasn't reported/responded to status requests).", + ) + info: Optional["NodeInfo"] = Field( + default=None, + title="Node Info", + description="Information about the node, provided by the node itself.", + ) + + +class NodeInfo(NodeDefinition, NodeModuleDefinition): + """Information about a MADSci Node.""" + + actions: dict[str, "ActionDefinition"] = Field( + title="Module Actions", + description="The actions that the module supports.", + default_factory=dict, + ) + config_values: dict[str, Any] = Field( + default_factory=dict, + title="Node Configuration", + description="The configuration of the node.", + ) + + @classmethod + def from_node_and_module( + cls, + node: NodeDefinition, + module: Optional[NodeModuleDefinition] = None, + config_values: Optional[dict[str, Any]] = None, + ) -> "NodeInfo": + """Create a NodeInfo from a NodeDefinition and a ModuleDefinition.""" + if module is None: + module = get_module_from_node_definition(node) + if config_values is None: + config_values = {} + # * Merge the node and module configs and commands, with the node config taking precedence + config = {**module.config, **node.config} + commands = {**module.commands, **node.commands} + return cls( + **node.model_dump(exclude={"config", "commands"}), + **module.model_dump(exclude={"config", "commands"}), + config=config, + commands=commands, + config_values=config_values, + ) + + +class NodeStatus(BaseModel): + """Status of a MADSci Node.""" + + busy: bool = Field( + default=False, + title="Node Busy", + description="Whether the node is currently at 
capacity, i.e. running the maximum number of actions allowed.", + ) + running_actions: set[str] = Field( + default_factory=set, + title="Running Action IDs", + description="The IDs of the actions that the node is currently running.", + ) + paused: bool = Field( + default=False, + title="Node Paused", + description="Whether the node is paused.", + ) + locked: bool = Field( + default=False, + title="Node Locked", + description="Whether the node is locked, preventing it from accepting any actions.", + ) + errored: bool = Field( + default=False, + title="Node Errored", + description="Whether the node is in an errored state.", + ) + errors: list[Error] = Field( + default_factory=list, + title="Node Errors", + description="A list of errors that the node has encountered.", + ) + initializing: bool = Field( + default=False, + title="Node Initializing", + description="Whether the node is currently initializing.", + ) + waiting_for_config: set[str] = Field( + default_factory=set, + title="Node Waiting for Configuration", + description="Set of configuration parameters that the node is waiting for.", + ) + config_values: dict[str, Any] = Field( + default_factory=dict, + title="Node Configuration Values", + description="The current configuration values of the node.", + ) + + @computed_field + @property + def ready(self) -> bool: + """Whether the node is ready to accept actions.""" + ready = True + if self.busy: + ready = False + if self.locked: + ready = False + if self.errored: + ready = False + if self.initializing: + ready = False + if self.paused: + ready = False + if len(self.waiting_for_config) > 0: + ready = False + return ready + + @computed_field + @property + def description(self) -> str: + """A description of the node's status.""" + reasons = [] + if self.busy: + reasons.append("Node is busy") + if self.locked: + reasons.append("Node is locked") + if self.errored: + reasons.append("Node is in an error state") + if self.initializing: + reasons.append("Node is 
initializing") + if self.paused: + reasons.append("Node is paused") + if len(self.waiting_for_config) > 0: + reasons.append( + f"Node is missing configuration values: {self.waiting_for_config}", + ) + if reasons: + return "; ".join(reasons) + return "Node is ready" + + +class NodeSetConfigResponse(BaseModel): + """Response from a Node Set Config Request""" + + success: bool diff --git a/madsci/madsci_common/madsci/common/types/resource_types.py b/madsci/madsci_common/madsci/common/types/resource_types.py index ec02802..91ee3c7 100644 --- a/madsci/madsci_common/madsci/common/types/resource_types.py +++ b/madsci/madsci_common/madsci/common/types/resource_types.py @@ -1,718 +1,718 @@ -"""Types related to MADSci Resources.""" - -from enum import Enum -from typing import Annotated, Any, Literal, Optional, Union - -from pydantic import Json -from pydantic.config import ConfigDict -from pydantic.functional_validators import field_validator, model_validator -from pydantic.types import Discriminator, Tag -from sqlmodel.main import Field - -from madsci.common.types.auth_types import OwnershipInfo -from madsci.common.types.base_types import BaseModel, new_ulid_str -from madsci.common.types.validators import ulid_validator - - -class ResourceType(str, Enum): - """Type for a MADSci Resource.""" - - resource = "resource" - """The root resource type. 
Used when a resource type is not known or any resource type is acceptable.""" - asset = "asset" - consumable = "consumable" - - -class AssetType(str, Enum): - """Type for a MADSci Asset.""" - - container = "container" - asset = "asset" - - -class ConsumableType(str, Enum): - """Type for a MADSci Consumable.""" - - discrete_consumable = "discrete_consumable" - continuous_consumable = "continuous_consumable" - - -class ContainerType(str, Enum): - """Type for a MADSci Container.""" - - stack = "stack" - queue = "queue" - collection = "collection" - grid = "grid" - voxel_grid = "voxel_grid" - pool = "pool" - - -ResourceTypes = Union[ResourceType, AssetType, ContainerType, ConsumableType] - - -class ResourceTypeDefinition(BaseModel): - """Definition for a MADSci Resource Type.""" - - model_config = ConfigDict(extra="allow") - - type_name: str = Field( - title="Resource Type Name", - description="The name of the type of resource (i.e. 'plate_96_well_corningware', 'tube_rack_24', etc.).", - ) - type_description: str = Field( - title="Resource Type Description", - description="A description of the custom type of the resource.", - ) - base_type: Literal[ResourceType.resource] = Field( - default=ResourceType.resource, - title="Resource Base Type", - description="The base type of the resource.", - ) - parent_types: list[str] = Field( - default=["resource"], - title="Resource Parent Types", - description="The parent types of the resource.", - ) - custom_attributes: Optional[list["CustomResourceAttributeDefinition"]] = Field( - default=None, - title="Custom Attributes", - description="Custom attributes used by resources of this type.", - ) - - @field_validator("parent_types", mode="before") - @classmethod - def validate_parent_types(cls, v: Union[list[str], str]) -> list[str]: - """Validate parent types.""" - if isinstance(v, str): - return [v] - return v - - -class CustomResourceAttributeDefinition(BaseModel, extra="allow"): - """Definition for a MADSci Custom Resource 
Attribute.""" - - attribute_name: str = Field( - title="Attribute Name", - description="The name of the attribute.", - ) - attribute_description: Optional[str] = Field( - default=None, - title="Attribute Description", - description="A description of the attribute.", - ) - optional: bool = Field( - default=False, - title="Optional", - description="Whether the attribute is optional.", - ) - default_value: Json[Any] = Field( - default=None, - title="Default Value", - description="The default value of the attribute.", - ) - - -class ContainerResourceTypeDefinition(ResourceTypeDefinition): - """Definition for a MADSci Container Resource Type.""" - - supported_child_types: list[str] = Field( - title="Supported Child Types", - description="The resource types for children supported by the container. If `resource` is included, the container can contain any resource type.", - ) - default_capacity: Optional[Union[int, float]] = Field( - title="Default Capacity", - description="The default maximum capacity of the container. If None, the container has no capacity limit.", - default=None, - ) - resizeable: bool = Field( - default=False, - title="Resizeable", - description="Whether containers of this type support different sizes. If True, the container can be resized. If False, the container is fixed size.", - ) - default_children: Optional[ - Union[list["ResourceDefinition"], dict[str, "ResourceDefinition"]] - ] = Field( - default=None, - title="Default Children", - description="The default children to create when populating the container. 
Takes precedence over default_child_template.", - ) - default_child_template: Optional[list["ResourceDefinition"]] = Field( - default=None, - title="Default Child Template", - description="The default template for children to create when populating the container.", - ) - base_type: Literal[AssetType.container] = Field( - default=AssetType.container, - title="Container Base Type", - description="The base type of the container.", - ) - - -class AssetResourceTypeDefinition(ResourceTypeDefinition): - """Definition for a MADSci Asset Resource Type.""" - - base_type: Literal[ResourceType.asset] = Field( - default=ResourceType.asset, - title="Asset Base Type", - description="The base type of the asset.", - ) - - -class ConsumableResourceTypeDefinition(ResourceTypeDefinition): - """Definition for a MADSci Consumable Resource Type.""" - - base_type: Literal[ResourceType.consumable] = Field( - default=ResourceType.consumable, - title="Consumable Base Type", - description="The base type of the consumable.", - ) - - -class DiscreteConsumableResourceTypeDefinition(ConsumableResourceTypeDefinition): - """Definition for a MADSci Discrete Consumable Resource Type.""" - - base_type: Literal[ConsumableType.discrete_consumable] = Field( - default=ConsumableType.discrete_consumable, - title="Discrete Consumable Base Type", - description="The base type of the discrete consumable.", - ) - - -class ContinuousConsumableResourceTypeDefinition(ConsumableResourceTypeDefinition): - """Definition for a MADSci Continuous Consumable Resource Type.""" - - base_type: Literal[ConsumableType.continuous_consumable] = Field( - default=ConsumableType.continuous_consumable, - title="Continuous Consumable Base Type", - description="The base type of the continuous consumable.", - ) - - -class StackResourceTypeDefinition(ContainerResourceTypeDefinition): - """Definition for a MADSci Stack Resource Type.""" - - default_child_quantity: Optional[int] = Field( - default=None, - title="Default Child Quantity", 
- description="The default number of children to create when populating the container. If None, the container will be populated with a single child.", - ) - base_type: Literal[ContainerType.stack] = Field( - default=ContainerType.stack, - title="Stack Base Type", - description="The base type of the stack.", - ) - - -class QueueResourceTypeDefinition(ContainerResourceTypeDefinition): - """Definition for a MADSci Queue Resource Type.""" - - default_child_quantity: Optional[int] = Field( - default=None, - title="Default Child Quantity", - description="The default number of children to create when populating the container. If None, the container will be populated with a single child.", - ) - base_type: Literal[ContainerType.queue] = Field( - default=ContainerType.queue, - title="Queue Base Type", - description="The base type of the queue.", - ) - - -class CollectionResourceTypeDefinition(ContainerResourceTypeDefinition): - """Definition for a MADSci Collection Resource Type.""" - - keys: Optional[list[str]] = Field( - title="Collection Keys", - description="The keys of the collection.", - ) - default_children: Optional[ - Union[list["ResourceDefinition"], dict[str, "ResourceDefinition"]] - ] = Field( - default=None, - title="Default Children", - description="The default children to create when populating the container.", - ) - - @field_validator("keys", mode="before") - @classmethod - def validate_keys(cls, v: Union[int, list[str]]) -> list[str]: - """Convert integer keys count to 1-indexed range.""" - if isinstance(v, int): - return [str(i) for i in range(1, v + 1)] - return v - - base_type: Literal[ContainerType.collection] = Field( - default=ContainerType.collection, - title="Collection Base Type", - description="The base type of the collection.", - ) - - -class GridResourceTypeDefinition(ContainerResourceTypeDefinition): - """Definition for a MADSci Grid Resource Type.""" - - rows: list[str] = Field( - title="Grid Rows", - description="The row labels for the 
grid.", - ) - columns: list[str] = Field( - title="Grid Columns", - description="The column labels for the grid.", - ) - - @field_validator("columns", "rows", mode="before") - @classmethod - def validate_keys(cls, v: Union[int, list[str]]) -> list[str]: - """Convert integer keys count to 1-indexed range.""" - if isinstance(v, int): - return [str(i) for i in range(1, v + 1)] - return v - - base_type: Literal[ContainerType.grid] = Field( - default=ContainerType.grid, - title="Grid Base Type", - description="The base type of the grid.", - ) - - -class VoxelGridResourceTypeDefinition(GridResourceTypeDefinition): - """Definition for a MADSci Voxel Grid Resource Type.""" - - capacity: Optional[int] = Field( - title="Collection Capacity", - description="The maximum capacity of each element in the grid.", - ) - planes: list[str] = Field( - title="Voxel Grid Planes", - description="The keys of the planes in the grid.", - ) - - @field_validator("columns", "rows", mode="before") - @classmethod - def validate_keys(cls, v: Union[int, list[str]]) -> list[str]: - """Convert integer keys count to 1-indexed range.""" - if isinstance(v, int): - return [str(i) for i in range(1, v + 1)] - return v - - base_type: Literal[ContainerType.voxel_grid] = Field( - default=ContainerType.voxel_grid, - title="Voxel Grid Base Type", - description="The base type of the voxel grid.", - ) - - -class PoolResourceTypeDefinition(ContainerResourceTypeDefinition): - """Definition for a MADSci Pool Resource Type.""" - - base_type: Literal[ContainerType.pool] = Field( - default=ContainerType.pool, - title="Pool Base Type", - description="The base type of the pool.", - ) - - -class ResourceDefinition(BaseModel, extra="allow"): - """Definition for a MADSci Resource.""" - - model_config = ConfigDict(extra="allow") - - resource_name: str = Field( - title="Resource Name", - description="The name of the resource.", - ) - resource_type: str = Field( - title="Resource Type", - description="The type of the 
resource.", - ) - base_type: Optional[str] = Field( - default=None, - title="Resource Base Type", - description="The base type of the resource.", - ) - resource_description: Optional[str] = Field( - default=None, - title="Resource Description", - description="A description of the resource.", - ) - resource_id: str = Field( - title="Resource ID", - description="The ID of the resource.", - default_factory=new_ulid_str, - ) - parent: Optional[str] = Field( - default=None, - title="Parent Resource", - description="The parent resource ID or name. If None, defaults to the owning module or workcell.", - ) - attributes: dict[str, Json] = Field( - title="Resource Attributes", - description="Additional attributes for the resource.", - default_factory=dict, - ) - - is_ulid = field_validator("resource_id")(ulid_validator) - - -class AssetResourceDefinition(ResourceDefinition): - """Definition for an asset resource.""" - - -class ConsumableResourceDefinition(ResourceDefinition): - """Definition for a consumable resource.""" - - -class DiscreteConsumableResourceDefinition(ConsumableResourceDefinition): - """Definition for a discrete consumable resource.""" - - -class ContinuousConsumableResourceDefinition(ConsumableResourceDefinition): - """Definition for a continuous consumable resource.""" - - -class ContainerResourceDefinition(ResourceDefinition): - """Definition for a container resource.""" - - capacity: Optional[Union[int, float]] = Field( - default=None, - title="Container Capacity", - description="The capacity of the container. If None, uses the type's default_capacity.", - ) - default_children: Optional[ - Union[list[ResourceDefinition], dict[str, ResourceDefinition]] - ] = Field( - default=None, - title="Default Children", - description="The default children to create when initializing the container. 
If None, use the type's default_children.", - ) - default_child_template: Optional[ResourceDefinition] = Field( - default=None, - title="Default Child Template", - description="Template for creating child resources, supporting variable substitution. If None, use the type's default_child_template.", - ) - - -class CollectionResourceDefinition(ContainerResourceDefinition): - """Definition for a collection resource. Collections are used for resources that have a number of children, each with a unique key, which can be randomly accessed.""" - - keys: Optional[Union[int, list[str]]] = Field( - default=None, - title="Collection Keys", - description="The keys for the collection. Can be an integer (converted to 1-based range) or explicit list.", - ) - default_children: Optional[ - Union[list[ResourceDefinition], dict[str, ResourceDefinition]] - ] = Field( - default=None, - title="Default Children", - description="The default children to create when initializing the collection. If None, use the type's default_children.", - ) - - @field_validator("keys", mode="before") - @classmethod - def validate_keys(cls, v: Union[int, list[str], None]) -> Optional[list[str]]: - """Convert integer keys to 1-based range if needed.""" - if isinstance(v, int): - return [str(i) for i in range(1, v + 1)] - return v - - -class GridResourceDefinition(ContainerResourceDefinition): - """Definition for a grid resource. Grids are 2D grids of resources. They are treated as nested collections (i.e. Collection[Collection[Resource]]).""" - - default_children: Optional[dict[str, dict[str, ResourceDefinition]]] = Field( - default=None, - title="Default Children", - description="The default children to create when initializing the collection. If None, use the type's default_children.", - ) - - -class VoxelGridResourceDefinition(GridResourceDefinition): - """Definition for a voxel grid resource. Voxel grids are 3D grids of resources. They are treated as nested collections (i.e. 
Collection[Collection[Collection[Resource]]]).""" - - default_children: Optional[dict[str, dict[str, dict[str, ResourceDefinition]]]] = ( - Field( - default=None, - title="Default Children", - description="The default children to create when initializing the collection. If None, use the type's default_children.", - ) - ) - - -class StackResourceDefinition(ContainerResourceDefinition): - """Definition for a stack resource.""" - - default_child_quantity: Optional[int] = Field( - default=None, - title="Default Child Quantity", - description="The number of children to create by default. If None, use the type's default_child_quantity.", - ) - - -class QueueResourceDefinition(ContainerResourceDefinition): - """Definition for a queue resource.""" - - default_child_quantity: Optional[int] = Field( - default=None, - title="Default Child Quantity", - description="The number of children to create by default. If None, use the type's default_child_quantity.", - ) - - -class PoolResourceDefinition(ContainerResourceDefinition): - """Definition for a pool resource. 
Pool resources are collections of consumables with no structure (used for wells, reservoirs, etc.).""" - - -ResourceTypeDefinitions = Union[ - ResourceTypeDefinition, - ContainerResourceTypeDefinition, # * container of resources: Container[Resource] - AssetResourceTypeDefinition, # * trackable resource: Asset - ConsumableResourceTypeDefinition, # * consumable resource: Consumable - StackResourceTypeDefinition, # * stack of resources: Container[Resource] - QueueResourceTypeDefinition, # * queue of resources: Container[Resource] - CollectionResourceTypeDefinition, # * collection of resources: Container[Resource] - GridResourceTypeDefinition, # * 2D grid of resources: Collection[Collection[Resource]] - VoxelGridResourceTypeDefinition, # * 3D grid of resources: Collection[Collection[Collection[Resource]]] - PoolResourceTypeDefinition, # * collection of consumables with no structure: Collection[Consumable] -] - -ResourceDefinitions = Union[ - Annotated[ResourceDefinition, Tag("resource")], - Annotated[AssetResourceDefinition, Tag("asset")], - Annotated[ContainerResourceDefinition, Tag("container")], - Annotated[CollectionResourceDefinition, Tag("collection")], - Annotated[GridResourceDefinition, Tag("grid")], - Annotated[VoxelGridResourceDefinition, Tag("voxel_grid")], - Annotated[StackResourceDefinition, Tag("stack")], - Annotated[QueueResourceDefinition, Tag("queue")], - Annotated[PoolResourceDefinition, Tag("pool")], -] - - -def discriminate_default_resources( - v: Union[ResourceDefinitions, dict[str, Any]], -) -> ResourceDefinitions: - """Discriminate default resources. 
If the resource type is not explicitly defined, default to 'resource'.""" - if isinstance(v, dict): - if v.get("resource_type") in RESOURCE_DEFINITION_MAP: - return v.get("resource_type") - return "resource" - if v.resource_type in RESOURCE_DEFINITION_MAP: - return v.resource_type - return "resource" - - -class ResourceFile(BaseModel): - """Definition for a MADSci Resource File.""" - - resource_types: list[ - Annotated[ResourceTypeDefinitions, Field(discriminator="base_type")] - ] = Field( - title="Resource Types", - description="The definitions of the resource types in the file.", - default=[], - ) - default_resources: list[ - Annotated[ResourceDefinitions, Discriminator(discriminate_default_resources)] - ] = Field( - title="Default Resources", - description="The definitions of the default resources in the file.", - default=[], - ) - - @model_validator(mode="after") - def validate_resource_types(self) -> "ResourceFile": - """Validate resource types.""" - for resource_type in self.resource_types: - for parent_type in resource_type.parent_types: - if ( - parent_type not in RESOURCE_TYPE_DEFINITION_MAP - and parent_type - not in [ - resource_type.type_name for resource_type in self.resource_types - ] - ): - raise ValueError( - f"Unknown resource parent type: {parent_type}, parent type must be one of {RESOURCE_TYPE_DEFINITION_MAP.keys()} or a defined resource type.", - ) - return self - - @model_validator(mode="after") - def validate_default_resources(self) -> "ResourceFile": - """Validate default resources and their resource types.""" - default_resources = [] - for resource in self.default_resources: - if resource.resource_type not in RESOURCE_DEFINITION_MAP: - resource_type = next( - ( - resource_type - for resource_type in self.resource_types - if resource_type.type_name == resource.resource_type - ), - None, - ) - if resource_type is None: - default_resources.append(resource) - else: - default_resources.append( - 
RESOURCE_DEFINITION_MAP[resource_type.base_type].model_validate( - resource, - ), - ) - else: - default_resources.append(resource) - self.__dict__["default_resources"] = default_resources - return self - - -RESOURCE_BASE_TYPES = [ - ResourceType.resource, - ResourceType.asset, - ResourceType.consumable, - ConsumableType.discrete_consumable, - ConsumableType.continuous_consumable, - AssetType.container, - ContainerType.stack, - ContainerType.queue, - ContainerType.collection, - ContainerType.grid, - ContainerType.voxel_grid, - ContainerType.pool, -] - -RESOURCE_TYPE_DEFINITION_MAP: dict[str, type[ResourceTypeDefinition]] = { - ResourceType.resource: ResourceTypeDefinition, - ResourceType.asset: AssetResourceTypeDefinition, - AssetType.container: ContainerResourceTypeDefinition, - ResourceType.consumable: ConsumableResourceTypeDefinition, - ConsumableType.discrete_consumable: DiscreteConsumableResourceTypeDefinition, - ConsumableType.continuous_consumable: ContinuousConsumableResourceTypeDefinition, - ContainerType.stack: StackResourceTypeDefinition, - ContainerType.queue: QueueResourceTypeDefinition, - ContainerType.collection: CollectionResourceTypeDefinition, - ContainerType.grid: GridResourceTypeDefinition, - ContainerType.voxel_grid: VoxelGridResourceTypeDefinition, - ContainerType.pool: PoolResourceTypeDefinition, -} - -RESOURCE_DEFINITION_MAP: dict[str, type[ResourceDefinition]] = { - ResourceType.resource: ResourceDefinition, - ResourceType.asset: AssetResourceDefinition, - AssetType.container: ContainerResourceDefinition, - ResourceType.consumable: ConsumableResourceDefinition, - ConsumableType.discrete_consumable: DiscreteConsumableResourceDefinition, - ConsumableType.continuous_consumable: ContinuousConsumableResourceDefinition, - ContainerType.stack: StackResourceDefinition, - ContainerType.queue: QueueResourceDefinition, - ContainerType.collection: CollectionResourceDefinition, - ContainerType.grid: GridResourceDefinition, - ContainerType.voxel_grid: 
VoxelGridResourceDefinition, - ContainerType.pool: PoolResourceDefinition, -} - - -class ResourceBase(ResourceDefinition, extra="allow"): - """Base class for all MADSci Resources.""" - - resource_url: str = Field( - title="Resource URL", - description="The URL of the resource.", - ) - ownership: OwnershipInfo = Field( - title="Ownership", - description="Information about the ownership of the resource.", - default_factory=OwnershipInfo, - ) - - -class AssetBase(AssetResourceDefinition): - """Base class for all MADSci Assets.""" - - -class ConsumableBase(ResourceBase): - """Base class for all MADSci Consumables.""" - - quantity: Optional[Union[int, float]] = Field( - title="Quantity", - description="The quantity of the consumable.", - ) - - -class DiscreteConsumableBase(ConsumableBase): - """Base class for all MADSci Discrete Consumables.""" - - quantity: int = Field( - title="Quantity", - description="The quantity of the discrete consumable.", - ) - - -class ContinuousConsumableBase(ConsumableBase): - """Base class for all MADSci Continuous Consumables.""" - - quantity: float = Field( - title="Quantity", - description="The quantity of the continuous consumable.", - ) - - -class ContainerBase(ResourceBase): - """Base class for all MADSci Containers.""" - - children: list[ResourceBase] = Field( - title="Children", - description="The children of the container.", - ) - capacity: Optional[int] = Field( - title="Capacity", - description="The capacity of the container.", - ) - - -class CollectionBase(ContainerBase): - """Base class for all MADSci Collections.""" - - children: dict[str, ResourceBase] = Field( - title="Keys", - description="The keys of the collection.", - ) - - -class GridBase(ContainerBase): - """Base class for all MADSci Grids.""" - - children: dict[str, dict[str, ResourceBase]] = Field( - title="Children", - description="The children of the grid.", - ) - - -class VoxelGridBase(GridBase): - """Base class for all MADSci Voxel Grids.""" - - children: 
dict[str, dict[str, dict[str, ResourceBase]]] = Field( - title="Children", - description="The children of the voxel grid.", - ) - - -class StackBase(ContainerBase): - """Base class for all MADSci Stacks.""" - - -class QueueBase(ContainerBase): - """Base class for all MADSci Queues.""" - - -class PoolBase(ContainerBase): - """Base class for all MADSci Pools.""" - - children: dict[str, ConsumableBase] = Field( - title="Children", - description="The children of the pool.", - ) - capacity: Optional[Union[int, float]] = Field( - title="Capacity", - description="The capacity of the pool.", - ) +"""Types related to MADSci Resources.""" + +from enum import Enum +from typing import Annotated, Any, Literal, Optional, Union + +from pydantic import Json +from pydantic.config import ConfigDict +from pydantic.functional_validators import field_validator, model_validator +from pydantic.types import Discriminator, Tag +from sqlmodel.main import Field + +from madsci.common.types.auth_types import OwnershipInfo +from madsci.common.types.base_types import BaseModel, new_ulid_str +from madsci.common.types.validators import ulid_validator + + +class ResourceType(str, Enum): + """Type for a MADSci Resource.""" + + resource = "resource" + """The root resource type. 
Used when a resource type is not known or any resource type is acceptable.""" + asset = "asset" + consumable = "consumable" + + +class AssetType(str, Enum): + """Type for a MADSci Asset.""" + + container = "container" + asset = "asset" + + +class ConsumableType(str, Enum): + """Type for a MADSci Consumable.""" + + discrete_consumable = "discrete_consumable" + continuous_consumable = "continuous_consumable" + + +class ContainerType(str, Enum): + """Type for a MADSci Container.""" + + stack = "stack" + queue = "queue" + collection = "collection" + grid = "grid" + voxel_grid = "voxel_grid" + pool = "pool" + + +ResourceTypes = Union[ResourceType, AssetType, ContainerType, ConsumableType] + + +class ResourceTypeDefinition(BaseModel): + """Definition for a MADSci Resource Type.""" + + model_config = ConfigDict(extra="allow") + + type_name: str = Field( + title="Resource Type Name", + description="The name of the type of resource (i.e. 'plate_96_well_corningware', 'tube_rack_24', etc.).", + ) + type_description: str = Field( + title="Resource Type Description", + description="A description of the custom type of the resource.", + ) + base_type: Literal[ResourceType.resource] = Field( + default=ResourceType.resource, + title="Resource Base Type", + description="The base type of the resource.", + ) + parent_types: list[str] = Field( + default=["resource"], + title="Resource Parent Types", + description="The parent types of the resource.", + ) + custom_attributes: Optional[list["CustomResourceAttributeDefinition"]] = Field( + default=None, + title="Custom Attributes", + description="Custom attributes used by resources of this type.", + ) + + @field_validator("parent_types", mode="before") + @classmethod + def validate_parent_types(cls, v: Union[list[str], str]) -> list[str]: + """Validate parent types.""" + if isinstance(v, str): + return [v] + return v + + +class CustomResourceAttributeDefinition(BaseModel, extra="allow"): + """Definition for a MADSci Custom Resource 
Attribute.""" + + attribute_name: str = Field( + title="Attribute Name", + description="The name of the attribute.", + ) + attribute_description: Optional[str] = Field( + default=None, + title="Attribute Description", + description="A description of the attribute.", + ) + optional: bool = Field( + default=False, + title="Optional", + description="Whether the attribute is optional.", + ) + default_value: Json[Any] = Field( + default=None, + title="Default Value", + description="The default value of the attribute.", + ) + + +class ContainerResourceTypeDefinition(ResourceTypeDefinition): + """Definition for a MADSci Container Resource Type.""" + + supported_child_types: list[str] = Field( + title="Supported Child Types", + description="The resource types for children supported by the container. If `resource` is included, the container can contain any resource type.", + ) + default_capacity: Optional[Union[int, float]] = Field( + title="Default Capacity", + description="The default maximum capacity of the container. If None, the container has no capacity limit.", + default=None, + ) + resizeable: bool = Field( + default=False, + title="Resizeable", + description="Whether containers of this type support different sizes. If True, the container can be resized. If False, the container is fixed size.", + ) + default_children: Optional[ + Union[list["ResourceDefinition"], dict[str, "ResourceDefinition"]] + ] = Field( + default=None, + title="Default Children", + description="The default children to create when populating the container. 
Takes precedence over default_child_template.", + ) + default_child_template: Optional[list["ResourceDefinition"]] = Field( + default=None, + title="Default Child Template", + description="The default template for children to create when populating the container.", + ) + base_type: Literal[AssetType.container] = Field( + default=AssetType.container, + title="Container Base Type", + description="The base type of the container.", + ) + + +class AssetResourceTypeDefinition(ResourceTypeDefinition): + """Definition for a MADSci Asset Resource Type.""" + + base_type: Literal[ResourceType.asset] = Field( + default=ResourceType.asset, + title="Asset Base Type", + description="The base type of the asset.", + ) + + +class ConsumableResourceTypeDefinition(ResourceTypeDefinition): + """Definition for a MADSci Consumable Resource Type.""" + + base_type: Literal[ResourceType.consumable] = Field( + default=ResourceType.consumable, + title="Consumable Base Type", + description="The base type of the consumable.", + ) + + +class DiscreteConsumableResourceTypeDefinition(ConsumableResourceTypeDefinition): + """Definition for a MADSci Discrete Consumable Resource Type.""" + + base_type: Literal[ConsumableType.discrete_consumable] = Field( + default=ConsumableType.discrete_consumable, + title="Discrete Consumable Base Type", + description="The base type of the discrete consumable.", + ) + + +class ContinuousConsumableResourceTypeDefinition(ConsumableResourceTypeDefinition): + """Definition for a MADSci Continuous Consumable Resource Type.""" + + base_type: Literal[ConsumableType.continuous_consumable] = Field( + default=ConsumableType.continuous_consumable, + title="Continuous Consumable Base Type", + description="The base type of the continuous consumable.", + ) + + +class StackResourceTypeDefinition(ContainerResourceTypeDefinition): + """Definition for a MADSci Stack Resource Type.""" + + default_child_quantity: Optional[int] = Field( + default=None, + title="Default Child Quantity", 
+ description="The default number of children to create when populating the container. If None, the container will be populated with a single child.", + ) + base_type: Literal[ContainerType.stack] = Field( + default=ContainerType.stack, + title="Stack Base Type", + description="The base type of the stack.", + ) + + +class QueueResourceTypeDefinition(ContainerResourceTypeDefinition): + """Definition for a MADSci Queue Resource Type.""" + + default_child_quantity: Optional[int] = Field( + default=None, + title="Default Child Quantity", + description="The default number of children to create when populating the container. If None, the container will be populated with a single child.", + ) + base_type: Literal[ContainerType.queue] = Field( + default=ContainerType.queue, + title="Queue Base Type", + description="The base type of the queue.", + ) + + +class CollectionResourceTypeDefinition(ContainerResourceTypeDefinition): + """Definition for a MADSci Collection Resource Type.""" + + keys: Optional[list[str]] = Field( + title="Collection Keys", + description="The keys of the collection.", + ) + default_children: Optional[ + Union[list["ResourceDefinition"], dict[str, "ResourceDefinition"]] + ] = Field( + default=None, + title="Default Children", + description="The default children to create when populating the container.", + ) + + @field_validator("keys", mode="before") + @classmethod + def validate_keys(cls, v: Union[int, list[str]]) -> list[str]: + """Convert integer keys count to 1-indexed range.""" + if isinstance(v, int): + return [str(i) for i in range(1, v + 1)] + return v + + base_type: Literal[ContainerType.collection] = Field( + default=ContainerType.collection, + title="Collection Base Type", + description="The base type of the collection.", + ) + + +class GridResourceTypeDefinition(ContainerResourceTypeDefinition): + """Definition for a MADSci Grid Resource Type.""" + + rows: list[str] = Field( + title="Grid Rows", + description="The row labels for the 
grid.", + ) + columns: list[str] = Field( + title="Grid Columns", + description="The column labels for the grid.", + ) + + @field_validator("columns", "rows", mode="before") + @classmethod + def validate_keys(cls, v: Union[int, list[str]]) -> list[str]: + """Convert integer keys count to 1-indexed range.""" + if isinstance(v, int): + return [str(i) for i in range(1, v + 1)] + return v + + base_type: Literal[ContainerType.grid] = Field( + default=ContainerType.grid, + title="Grid Base Type", + description="The base type of the grid.", + ) + + +class VoxelGridResourceTypeDefinition(GridResourceTypeDefinition): + """Definition for a MADSci Voxel Grid Resource Type.""" + + capacity: Optional[int] = Field( + title="Collection Capacity", + description="The maximum capacity of each element in the grid.", + ) + planes: list[str] = Field( + title="Voxel Grid Planes", + description="The keys of the planes in the grid.", + ) + + @field_validator("columns", "rows", mode="before") + @classmethod + def validate_keys(cls, v: Union[int, list[str]]) -> list[str]: + """Convert integer keys count to 1-indexed range.""" + if isinstance(v, int): + return [str(i) for i in range(1, v + 1)] + return v + + base_type: Literal[ContainerType.voxel_grid] = Field( + default=ContainerType.voxel_grid, + title="Voxel Grid Base Type", + description="The base type of the voxel grid.", + ) + + +class PoolResourceTypeDefinition(ContainerResourceTypeDefinition): + """Definition for a MADSci Pool Resource Type.""" + + base_type: Literal[ContainerType.pool] = Field( + default=ContainerType.pool, + title="Pool Base Type", + description="The base type of the pool.", + ) + + +class ResourceDefinition(BaseModel, extra="allow"): + """Definition for a MADSci Resource.""" + + model_config = ConfigDict(extra="allow") + + resource_name: str = Field( + title="Resource Name", + description="The name of the resource.", + ) + resource_type: str = Field( + title="Resource Type", + description="The type of the 
resource.", + ) + base_type: Optional[str] = Field( + default=None, + title="Resource Base Type", + description="The base type of the resource.", + ) + resource_description: Optional[str] = Field( + default=None, + title="Resource Description", + description="A description of the resource.", + ) + resource_id: str = Field( + title="Resource ID", + description="The ID of the resource.", + default_factory=new_ulid_str, + ) + parent: Optional[str] = Field( + default=None, + title="Parent Resource", + description="The parent resource ID or name. If None, defaults to the owning module or workcell.", + ) + attributes: dict[str, Json] = Field( + title="Resource Attributes", + description="Additional attributes for the resource.", + default_factory=dict, + ) + + is_ulid = field_validator("resource_id")(ulid_validator) + + +class AssetResourceDefinition(ResourceDefinition): + """Definition for an asset resource.""" + + +class ConsumableResourceDefinition(ResourceDefinition): + """Definition for a consumable resource.""" + + +class DiscreteConsumableResourceDefinition(ConsumableResourceDefinition): + """Definition for a discrete consumable resource.""" + + +class ContinuousConsumableResourceDefinition(ConsumableResourceDefinition): + """Definition for a continuous consumable resource.""" + + +class ContainerResourceDefinition(ResourceDefinition): + """Definition for a container resource.""" + + capacity: Optional[Union[int, float]] = Field( + default=None, + title="Container Capacity", + description="The capacity of the container. If None, uses the type's default_capacity.", + ) + default_children: Optional[ + Union[list[ResourceDefinition], dict[str, ResourceDefinition]] + ] = Field( + default=None, + title="Default Children", + description="The default children to create when initializing the container. 
If None, use the type's default_children.", + ) + default_child_template: Optional[ResourceDefinition] = Field( + default=None, + title="Default Child Template", + description="Template for creating child resources, supporting variable substitution. If None, use the type's default_child_template.", + ) + + +class CollectionResourceDefinition(ContainerResourceDefinition): + """Definition for a collection resource. Collections are used for resources that have a number of children, each with a unique key, which can be randomly accessed.""" + + keys: Optional[Union[int, list[str]]] = Field( + default=None, + title="Collection Keys", + description="The keys for the collection. Can be an integer (converted to 1-based range) or explicit list.", + ) + default_children: Optional[ + Union[list[ResourceDefinition], dict[str, ResourceDefinition]] + ] = Field( + default=None, + title="Default Children", + description="The default children to create when initializing the collection. If None, use the type's default_children.", + ) + + @field_validator("keys", mode="before") + @classmethod + def validate_keys(cls, v: Union[int, list[str], None]) -> Optional[list[str]]: + """Convert integer keys to 1-based range if needed.""" + if isinstance(v, int): + return [str(i) for i in range(1, v + 1)] + return v + + +class GridResourceDefinition(ContainerResourceDefinition): + """Definition for a grid resource. Grids are 2D grids of resources. They are treated as nested collections (i.e. Collection[Collection[Resource]]).""" + + default_children: Optional[dict[str, dict[str, ResourceDefinition]]] = Field( + default=None, + title="Default Children", + description="The default children to create when initializing the collection. If None, use the type's default_children.", + ) + + +class VoxelGridResourceDefinition(GridResourceDefinition): + """Definition for a voxel grid resource. Voxel grids are 3D grids of resources. They are treated as nested collections (i.e. 
Collection[Collection[Collection[Resource]]]).""" + + default_children: Optional[dict[str, dict[str, dict[str, ResourceDefinition]]]] = ( + Field( + default=None, + title="Default Children", + description="The default children to create when initializing the collection. If None, use the type's default_children.", + ) + ) + + +class StackResourceDefinition(ContainerResourceDefinition): + """Definition for a stack resource.""" + + default_child_quantity: Optional[int] = Field( + default=None, + title="Default Child Quantity", + description="The number of children to create by default. If None, use the type's default_child_quantity.", + ) + + +class QueueResourceDefinition(ContainerResourceDefinition): + """Definition for a queue resource.""" + + default_child_quantity: Optional[int] = Field( + default=None, + title="Default Child Quantity", + description="The number of children to create by default. If None, use the type's default_child_quantity.", + ) + + +class PoolResourceDefinition(ContainerResourceDefinition): + """Definition for a pool resource. 
Pool resources are collections of consumables with no structure (used for wells, reservoirs, etc.).""" + + +ResourceTypeDefinitions = Union[ + ResourceTypeDefinition, + ContainerResourceTypeDefinition, # * container of resources: Container[Resource] + AssetResourceTypeDefinition, # * trackable resource: Asset + ConsumableResourceTypeDefinition, # * consumable resource: Consumable + StackResourceTypeDefinition, # * stack of resources: Container[Resource] + QueueResourceTypeDefinition, # * queue of resources: Container[Resource] + CollectionResourceTypeDefinition, # * collection of resources: Container[Resource] + GridResourceTypeDefinition, # * 2D grid of resources: Collection[Collection[Resource]] + VoxelGridResourceTypeDefinition, # * 3D grid of resources: Collection[Collection[Collection[Resource]]] + PoolResourceTypeDefinition, # * collection of consumables with no structure: Collection[Consumable] +] + +ResourceDefinitions = Union[ + Annotated[ResourceDefinition, Tag("resource")], + Annotated[AssetResourceDefinition, Tag("asset")], + Annotated[ContainerResourceDefinition, Tag("container")], + Annotated[CollectionResourceDefinition, Tag("collection")], + Annotated[GridResourceDefinition, Tag("grid")], + Annotated[VoxelGridResourceDefinition, Tag("voxel_grid")], + Annotated[StackResourceDefinition, Tag("stack")], + Annotated[QueueResourceDefinition, Tag("queue")], + Annotated[PoolResourceDefinition, Tag("pool")], +] + + +def discriminate_default_resources( + v: Union[ResourceDefinitions, dict[str, Any]], +) -> ResourceDefinitions: + """Discriminate default resources. 
If the resource type is not explicitly defined, default to 'resource'.""" + if isinstance(v, dict): + if v.get("resource_type") in RESOURCE_DEFINITION_MAP: + return v.get("resource_type") + return "resource" + if v.resource_type in RESOURCE_DEFINITION_MAP: + return v.resource_type + return "resource" + + +class ResourceFile(BaseModel): + """Definition for a MADSci Resource File.""" + + resource_types: list[ + Annotated[ResourceTypeDefinitions, Field(discriminator="base_type")] + ] = Field( + title="Resource Types", + description="The definitions of the resource types in the file.", + default=[], + ) + default_resources: list[ + Annotated[ResourceDefinitions, Discriminator(discriminate_default_resources)] + ] = Field( + title="Default Resources", + description="The definitions of the default resources in the file.", + default=[], + ) + + @model_validator(mode="after") + def validate_resource_types(self) -> "ResourceFile": + """Validate resource types.""" + for resource_type in self.resource_types: + for parent_type in resource_type.parent_types: + if ( + parent_type not in RESOURCE_TYPE_DEFINITION_MAP + and parent_type + not in [ + resource_type.type_name for resource_type in self.resource_types + ] + ): + raise ValueError( + f"Unknown resource parent type: {parent_type}, parent type must be one of {RESOURCE_TYPE_DEFINITION_MAP.keys()} or a defined resource type.", + ) + return self + + @model_validator(mode="after") + def validate_default_resources(self) -> "ResourceFile": + """Validate default resources and their resource types.""" + default_resources = [] + for resource in self.default_resources: + if resource.resource_type not in RESOURCE_DEFINITION_MAP: + resource_type = next( + ( + resource_type + for resource_type in self.resource_types + if resource_type.type_name == resource.resource_type + ), + None, + ) + if resource_type is None: + default_resources.append(resource) + else: + default_resources.append( + 
RESOURCE_DEFINITION_MAP[resource_type.base_type].model_validate( + resource, + ), + ) + else: + default_resources.append(resource) + self.__dict__["default_resources"] = default_resources + return self + + +RESOURCE_BASE_TYPES = [ + ResourceType.resource, + ResourceType.asset, + ResourceType.consumable, + ConsumableType.discrete_consumable, + ConsumableType.continuous_consumable, + AssetType.container, + ContainerType.stack, + ContainerType.queue, + ContainerType.collection, + ContainerType.grid, + ContainerType.voxel_grid, + ContainerType.pool, +] + +RESOURCE_TYPE_DEFINITION_MAP: dict[str, type[ResourceTypeDefinition]] = { + ResourceType.resource: ResourceTypeDefinition, + ResourceType.asset: AssetResourceTypeDefinition, + AssetType.container: ContainerResourceTypeDefinition, + ResourceType.consumable: ConsumableResourceTypeDefinition, + ConsumableType.discrete_consumable: DiscreteConsumableResourceTypeDefinition, + ConsumableType.continuous_consumable: ContinuousConsumableResourceTypeDefinition, + ContainerType.stack: StackResourceTypeDefinition, + ContainerType.queue: QueueResourceTypeDefinition, + ContainerType.collection: CollectionResourceTypeDefinition, + ContainerType.grid: GridResourceTypeDefinition, + ContainerType.voxel_grid: VoxelGridResourceTypeDefinition, + ContainerType.pool: PoolResourceTypeDefinition, +} + +RESOURCE_DEFINITION_MAP: dict[str, type[ResourceDefinition]] = { + ResourceType.resource: ResourceDefinition, + ResourceType.asset: AssetResourceDefinition, + AssetType.container: ContainerResourceDefinition, + ResourceType.consumable: ConsumableResourceDefinition, + ConsumableType.discrete_consumable: DiscreteConsumableResourceDefinition, + ConsumableType.continuous_consumable: ContinuousConsumableResourceDefinition, + ContainerType.stack: StackResourceDefinition, + ContainerType.queue: QueueResourceDefinition, + ContainerType.collection: CollectionResourceDefinition, + ContainerType.grid: GridResourceDefinition, + ContainerType.voxel_grid: 
VoxelGridResourceDefinition, + ContainerType.pool: PoolResourceDefinition, +} + + +class ResourceBase(ResourceDefinition, extra="allow"): + """Base class for all MADSci Resources.""" + + resource_url: str = Field( + title="Resource URL", + description="The URL of the resource.", + ) + ownership: OwnershipInfo = Field( + title="Ownership", + description="Information about the ownership of the resource.", + default_factory=OwnershipInfo, + ) + + +class AssetBase(AssetResourceDefinition): + """Base class for all MADSci Assets.""" + + +class ConsumableBase(ResourceBase): + """Base class for all MADSci Consumables.""" + + quantity: Optional[Union[int, float]] = Field( + title="Quantity", + description="The quantity of the consumable.", + ) + + +class DiscreteConsumableBase(ConsumableBase): + """Base class for all MADSci Discrete Consumables.""" + + quantity: int = Field( + title="Quantity", + description="The quantity of the discrete consumable.", + ) + + +class ContinuousConsumableBase(ConsumableBase): + """Base class for all MADSci Continuous Consumables.""" + + quantity: float = Field( + title="Quantity", + description="The quantity of the continuous consumable.", + ) + + +class ContainerBase(ResourceBase): + """Base class for all MADSci Containers.""" + + children: list[ResourceBase] = Field( + title="Children", + description="The children of the container.", + ) + capacity: Optional[int] = Field( + title="Capacity", + description="The capacity of the container.", + ) + + +class CollectionBase(ContainerBase): + """Base class for all MADSci Collections.""" + + children: dict[str, ResourceBase] = Field( + title="Keys", + description="The keys of the collection.", + ) + + +class GridBase(ContainerBase): + """Base class for all MADSci Grids.""" + + children: dict[str, dict[str, ResourceBase]] = Field( + title="Children", + description="The children of the grid.", + ) + + +class VoxelGridBase(GridBase): + """Base class for all MADSci Voxel Grids.""" + + children: 
dict[str, dict[str, dict[str, ResourceBase]]] = Field( + title="Children", + description="The children of the voxel grid.", + ) + + +class StackBase(ContainerBase): + """Base class for all MADSci Stacks.""" + + +class QueueBase(ContainerBase): + """Base class for all MADSci Queues.""" + + +class PoolBase(ContainerBase): + """Base class for all MADSci Pools.""" + + children: dict[str, ConsumableBase] = Field( + title="Children", + description="The children of the pool.", + ) + capacity: Optional[Union[int, float]] = Field( + title="Capacity", + description="The capacity of the pool.", + ) diff --git a/madsci/madsci_common/madsci/common/types/squid_types.py b/madsci/madsci_common/madsci/common/types/squid_types.py index 402c8a7..acd401d 100644 --- a/madsci/madsci_common/madsci/common/types/squid_types.py +++ b/madsci/madsci_common/madsci/common/types/squid_types.py @@ -1,137 +1,137 @@ -"""Types for MADSci Squid configuration.""" - -from enum import Enum -from typing import Any, Optional, Union - -from pydantic.functional_validators import field_validator -from pydantic.networks import AnyUrl -from sqlmodel.main import Field - -from madsci.common.types.base_types import BaseModel, PathLike, new_ulid_str -from madsci.common.types.validators import ( - alphanumeric_with_underscores_validator, - ulid_validator, -) -from madsci.common.types.workcell_types import WorkcellDefinition - - -class LabDefinition(BaseModel): - """Definition for a MADSci Lab.""" - - name: str = Field(title="Name", description="The name of the lab.") - lab_id: str = Field( - title="Lab ID", - description="The ID of the lab.", - default_factory=new_ulid_str, - ) - description: Optional[str] = Field( - default=None, - title="Description", - description="A description of the lab.", - ) - server_config: "LabServerConfig" = Field( - title="Lab Server Configuration", - default_factory=lambda: LabServerConfig(), - description="The configuration for the lab server.", - ) - workcells: dict[str, 
Union["WorkcellDefinition", PathLike]] = Field( - default_factory=dict, - title="Workcells", - description="The workcells in the lab. Keys are workcell names. Values are either paths to workcell definition files, or workcell definition objects.", - ) - commands: dict[str, str] = Field( - default_factory=dict, - title="Commands", - description="Commands for operating the lab.", - ) - managers: dict[str, Union["ManagerDefinition", PathLike, AnyUrl]] = Field( - default_factory=dict, - title="Squid Manager Definitions", - description="Squid Manager definitions used by the lab. Either a path to a manager definition file, a URL to a manager, or a manager definition object. If the manager definition is a URL, the server will attempt to fetch the manager definition from the URL.", - ) - - @field_validator("commands") - @classmethod - def validate_commands(cls, v: dict[str, str]) -> dict[str, str]: - """Validate the commands.""" - if v: - for command in v: - if not str.isalnum(command): - raise ValueError(f"Command '{command}' must be alphanumeric") - return v - - is_ulid = field_validator("lab_id")(ulid_validator) - - -class LabServerConfig(BaseModel, extra="allow"): - """Configuration for a MADSci Lab Server.""" - - host: str = Field( - default="127.0.0.1", - title="Server Host", - description="The hostname or IP address of the Squid Lab Server.", - ) - port: int = Field( - default=8000, - title="Server Port", - description="The port number of the Squid Lab Server.", - ) - - -class ManagerDefinition(BaseModel): - """Definition for a Squid Manager.""" - - name: str = Field( - title="Manager Name", - description="The name of this manager instance.", - ) - manager_id: Optional[str] = Field( - title="Manager ID", - description="The ID of the manager.", - default=None, - ) - description: Optional[str] = Field( - default=None, - title="Description", - description="A description of the manager.", - ) - manager_type: str = Field( - title="Manager Type", - description="The type of 
the manager, used by other components or managers to find matching managers.", - ) - manager_config: Optional[dict[str, Any]] = Field( - default=None, - title="Manager Configuration", - description="The configuration for the manager.", - ) - url: Optional[AnyUrl] = Field( - default=None, - title="Manager URL", - description="The URL of the manager server.", - ) - - is_alphanumeric = field_validator("manager_type")( - alphanumeric_with_underscores_validator, - ) - - -class ManagerTypes(str, Enum): - """Types of Squid Managers.""" - - WORKCELL_MANAGER = "workcell_manager" - RESOURCE_MANAGER = "resource_manager" - EVENT_MANAGER = "event_manager" - LOG_MANAGER = "log_manager" - AUTH_MANAGER = "auth_manager" - NOTIFICATION_MANAGER = "notification_manager" - DATA_MANAGER = "data_manager" - TRANSFER_MANAGER = "transfer_manager" - DASHBOARD_MANAGER = "dashboard_manager" - - @classmethod - def _missing_(cls, value: str) -> "ManagerTypes": - value = value.lower() - for member in cls: - if member.lower() == value: - return member - raise ValueError(f"Invalid ManagerTypes: {value}") +"""Types for MADSci Squid configuration.""" + +from enum import Enum +from typing import Any, Optional, Union + +from pydantic.functional_validators import field_validator +from pydantic.networks import AnyUrl +from sqlmodel.main import Field + +from madsci.common.types.base_types import BaseModel, PathLike, new_ulid_str +from madsci.common.types.validators import ( + alphanumeric_with_underscores_validator, + ulid_validator, +) +from madsci.common.types.workcell_types import WorkcellDefinition + + +class LabDefinition(BaseModel): + """Definition for a MADSci Lab.""" + + name: str = Field(title="Name", description="The name of the lab.") + lab_id: str = Field( + title="Lab ID", + description="The ID of the lab.", + default_factory=new_ulid_str, + ) + description: Optional[str] = Field( + default=None, + title="Description", + description="A description of the lab.", + ) + server_config: 
"LabServerConfig" = Field( + title="Lab Server Configuration", + default_factory=lambda: LabServerConfig(), + description="The configuration for the lab server.", + ) + workcells: dict[str, Union["WorkcellDefinition", PathLike]] = Field( + default_factory=dict, + title="Workcells", + description="The workcells in the lab. Keys are workcell names. Values are either paths to workcell definition files, or workcell definition objects.", + ) + commands: dict[str, str] = Field( + default_factory=dict, + title="Commands", + description="Commands for operating the lab.", + ) + managers: dict[str, Union["ManagerDefinition", PathLike, AnyUrl]] = Field( + default_factory=dict, + title="Squid Manager Definitions", + description="Squid Manager definitions used by the lab. Either a path to a manager definition file, a URL to a manager, or a manager definition object. If the manager definition is a URL, the server will attempt to fetch the manager definition from the URL.", + ) + + @field_validator("commands") + @classmethod + def validate_commands(cls, v: dict[str, str]) -> dict[str, str]: + """Validate the commands.""" + if v: + for command in v: + if not str.isalnum(command): + raise ValueError(f"Command '{command}' must be alphanumeric") + return v + + is_ulid = field_validator("lab_id")(ulid_validator) + + +class LabServerConfig(BaseModel, extra="allow"): + """Configuration for a MADSci Lab Server.""" + + host: str = Field( + default="127.0.0.1", + title="Server Host", + description="The hostname or IP address of the Squid Lab Server.", + ) + port: int = Field( + default=8000, + title="Server Port", + description="The port number of the Squid Lab Server.", + ) + + +class ManagerDefinition(BaseModel): + """Definition for a Squid Manager.""" + + name: str = Field( + title="Manager Name", + description="The name of this manager instance.", + ) + manager_id: Optional[str] = Field( + title="Manager ID", + description="The ID of the manager.", + default=None, + ) + description: 
Optional[str] = Field( + default=None, + title="Description", + description="A description of the manager.", + ) + manager_type: str = Field( + title="Manager Type", + description="The type of the manager, used by other components or managers to find matching managers.", + ) + manager_config: Optional[dict[str, Any]] = Field( + default=None, + title="Manager Configuration", + description="The configuration for the manager.", + ) + url: Optional[AnyUrl] = Field( + default=None, + title="Manager URL", + description="The URL of the manager server.", + ) + + is_alphanumeric = field_validator("manager_type")( + alphanumeric_with_underscores_validator, + ) + + +class ManagerTypes(str, Enum): + """Types of Squid Managers.""" + + WORKCELL_MANAGER = "workcell_manager" + RESOURCE_MANAGER = "resource_manager" + EVENT_MANAGER = "event_manager" + LOG_MANAGER = "log_manager" + AUTH_MANAGER = "auth_manager" + NOTIFICATION_MANAGER = "notification_manager" + DATA_MANAGER = "data_manager" + TRANSFER_MANAGER = "transfer_manager" + DASHBOARD_MANAGER = "dashboard_manager" + + @classmethod + def _missing_(cls, value: str) -> "ManagerTypes": + value = value.lower() + for member in cls: + if member.lower() == value: + return member + raise ValueError(f"Invalid ManagerTypes: {value}") diff --git a/madsci/madsci_common/madsci/common/types/step_types.py b/madsci/madsci_common/madsci/common/types/step_types.py index 40fd724..55af879 100644 --- a/madsci/madsci_common/madsci/common/types/step_types.py +++ b/madsci/madsci_common/madsci/common/types/step_types.py @@ -1,68 +1,92 @@ -"""Types for MADSci Steps.""" - -from datetime import datetime, timedelta -from typing import Any, Optional - -from sqlmodel.main import Field - -from madsci.common.types.action_types import ActionResult, ActionStatus -from madsci.common.types.base_types import BaseModel, PathLike, new_ulid_str - - -class StepDefinition(BaseModel): - """A definition of a step in a workflow.""" - - name: str = Field( - title="Step 
Name", - description="The name of the step.", - ) - description: Optional[str] = Field( - title="Step Description", - description="A description of the step.", - default=None, - ) - action: str = Field( - title="Step Action", - description="The action to perform in the step.", - ) - args: dict[str, Any] = Field( - title="Step Arguments", - description="Arguments for the step action.", - default_factory=dict, - ) - files: dict[str, PathLike] = Field( - title="Step Files", - description="Files to be used in the step.", - default_factory=dict, - ) - data_labels: dict[str, str] = Field( - title="Step Data Labels", - description="Data labels for the results of the step. Maps from the names of the outputs of the action to the names of the data labels.", - default_factory=dict, - ) - - -class Step(StepDefinition): - """A runtime representation of a step in a workflow.""" - - step_id: str = Field( - title="Step ID", - description="The ID of the step.", - default_factory=new_ulid_str, - ) - status: ActionStatus = Field( - title="Step Status", - description="The status of the step.", - default=ActionStatus.NOT_STARTED, - ) - results: dict[str, ActionResult] = Field( - title="Step Results", - description="The results of the step.", - default_factory=dict, - ) - start_time: Optional[datetime] = None - """Time the step started running""" - end_time: Optional[datetime] = None - """Time the step finished running""" - duration: Optional[timedelta] = None - """Duration of the step's run""" +"""Types for MADSci Steps.""" + +from datetime import datetime, timedelta +from typing import Any, Optional + +from sqlmodel.main import Field + +from madsci.common.types.action_types import ActionResult, ActionStatus +from madsci.common.types.base_types import BaseModel, PathLike, new_ulid_str + + +class Condition(BaseModel): + """A model for the conditions a step needs to be run""" + resource: str = Field( + title="Condition Target Resource", + description="The resource targeted by the 
condition", + ) + field: str = Field( + title="Condition Target Field", + description="The field in the target resource targeted by the condition", + ) + value: Any = Field( + title="Condition Target Resource", + description="The resource targeted by the condition", + ) + +class StepDefinition(BaseModel): + """A definition of a step in a workflow.""" + + name: str = Field( + title="Step Name", + description="The name of the step.", + ) + description: Optional[str] = Field( + title="Step Description", + description="A description of the step.", + default=None, + ) + action: str = Field( + title="Step Action", + description="The action to perform in the step.", + ) + node: str = Field( + title="Node Name", + description="Name of the node to run on" + ) + args: dict[str, Any] = Field( + title="Step Arguments", + description="Arguments for the step action.", + default_factory=dict, + ) + files: dict[str, PathLike] = Field( + title="Step Files", + description="Files to be used in the step.", + default_factory=dict, + ) + conditions: list[Condition] = Field( + title="Step Conditions", + description="Conditions for running the step", + default_factory=list + ) + data_labels: dict[str, str] = Field( + title="Step Data Labels", + description="Data labels for the results of the step. 
Maps from the names of the outputs of the action to the names of the data labels.", + default_factory=dict, + ) + + +class Step(StepDefinition): + """A runtime representation of a step in a workflow.""" + + step_id: str = Field( + title="Step ID", + description="The ID of the step.", + default_factory=new_ulid_str, + ) + status: ActionStatus = Field( + title="Step Status", + description="The status of the step.", + default=ActionStatus.NOT_STARTED, + ) + results: dict[str, ActionResult] = Field( + title="Step Results", + description="The results of the step.", + default_factory=dict, + ) + start_time: Optional[datetime] = None + """Time the step started running""" + end_time: Optional[datetime] = None + """Time the step finished running""" + duration: Optional[timedelta] = None + """Duration of the step's run""" diff --git a/madsci/madsci_common/madsci/common/types/validators.py b/madsci/madsci_common/madsci/common/types/validators.py index 9ff0d70..a02f38b 100644 --- a/madsci/madsci_common/madsci/common/types/validators.py +++ b/madsci/madsci_common/madsci/common/types/validators.py @@ -1,22 +1,22 @@ -"""Common validators for MADSci-derived types.""" - -from pydantic import ValidationInfo -from ulid import ULID - - -def ulid_validator(id: str, info: ValidationInfo) -> str: - """Validates that a string field is a valid ULID.""" - try: - ULID.from_str(id) - return id - except ValueError as e: - raise ValueError(f"Invalid ULID {id} for field {info.field_name}") from e - - -def alphanumeric_with_underscores_validator(v: str, info: ValidationInfo) -> str: - """Validates that a string field is alphanumeric with underscores.""" - if not str(v).replace("_", "").isalnum(): - raise ValueError( - f"Field {info.field_name} must contain only alphanumeric characters and underscores", - ) - return v +"""Common validators for MADSci-derived types.""" + +from pydantic import ValidationInfo +from ulid import ULID + + +def ulid_validator(id: str, info: ValidationInfo) -> str: + 
"""Validates that a string field is a valid ULID.""" + try: + ULID.from_str(id) + return id + except ValueError as e: + raise ValueError(f"Invalid ULID {id} for field {info.field_name}") from e + + +def alphanumeric_with_underscores_validator(v: str, info: ValidationInfo) -> str: + """Validates that a string field is alphanumeric with underscores.""" + if not str(v).replace("_", "").isalnum(): + raise ValueError( + f"Field {info.field_name} must contain only alphanumeric characters and underscores", + ) + return v diff --git a/madsci/madsci_common/madsci/common/types/workcell_types.py b/madsci/madsci_common/madsci/common/types/workcell_types.py index 3e96a62..2ac1772 100644 --- a/madsci/madsci_common/madsci/common/types/workcell_types.py +++ b/madsci/madsci_common/madsci/common/types/workcell_types.py @@ -1,62 +1,91 @@ -"""Types for MADSci Workcell configuration.""" - -from typing import Optional, Union - -from pydantic.functional_validators import field_validator -from pydantic.networks import AnyUrl -from sqlmodel.main import Field - -from madsci.common.types.base_types import BaseModel, PathLike, new_ulid_str -from madsci.common.types.node_types import NodeDefinition -from madsci.common.types.validators import ulid_validator - - -class WorkcellDefinition(BaseModel, extra="allow"): - """Configuration for a MADSci Workcell.""" - - name: str = Field( - title="Workcell Name", - description="The name of the workcell.", - ) - workcell_id: str = Field( - title="Workcell ID", - description="The ID of the workcell.", - default_factory=new_ulid_str, - ) - description: Optional[str] = Field( - default=None, - title="Workcell Description", - description="A description of the workcell.", - ) - config: "WorkcellConfig" = Field( - title="Workcell Configuration", - description="The configuration for the workcell.", - default_factory=lambda: WorkcellConfig(), - ) - nodes: dict[str, Union[AnyUrl, "NodeDefinition", PathLike]] = Field( - default_factory=dict, - title="Workcell Node 
URLs", - description="The URL, path, or definition for each node in the workcell.", - ) - - is_ulid = field_validator("workcell_id")(ulid_validator) - - -class WorkcellConfig(BaseModel): - """Configuration for a MADSci Workcell.""" - - scheduler_update_interval: float = Field( - default=0.1, - title="Scheduler Update Interval", - description="The interval at which the scheduler runs, in seconds.", - ) - node_update_interval: float = Field( - default=1.0, - title="Node Update Interval", - description="The interval at which the workcell queries its node's states, in seconds.", - ) - auto_start: bool = Field( - default=True, - title="Auto Start", - description="Whether the workcell should automatically create a new Workcell Manager and start it when the lab starts, registering it with the Lab Server.", - ) +"""Types for MADSci Workcell configuration.""" + +from typing import Optional, Union + +from pydantic.functional_validators import field_validator +from pydantic.networks import AnyUrl +from sqlmodel.main import Field + +from madsci.common.types.base_types import BaseModel, PathLike, new_ulid_str +from madsci.common.types.node_types import NodeDefinition +from madsci.common.types.validators import ulid_validator + + +class WorkcellDefinition(BaseModel, extra="allow"): + """Configuration for a MADSci Workcell.""" + + name: str = Field( + title="Workcell Name", + description="The name of the workcell.", + ) + workcell_id: str = Field( + title="Workcell ID", + description="The ID of the workcell.", + default_factory=new_ulid_str, + ) + description: Optional[str] = Field( + default=None, + title="Workcell Description", + description="A description of the workcell.", + ) + config: "WorkcellConfig" = Field( + title="Workcell Configuration", + description="The configuration for the workcell.", + default_factory=lambda: WorkcellConfig(), + ) + nodes: dict[str, Union[AnyUrl, "NodeDefinition", PathLike]] = Field( + default_factory=dict, + title="Workcell Node URLs", + 
description="The URL, path, or definition for each node in the workcell.", + ) + + is_ulid = field_validator("workcell_id")(ulid_validator) + + +class WorkcellConfig(BaseModel): + """Configuration for a MADSci Workcell.""" + workcell_name: str = Field( + default="Workcell 1", + title="Name", + description="The name of the workcell.", + ) + host: str = Field( + default="127.0.0.1", + title="Host", + description="The host to run the workcell manager on.", + ) + port: int = Field( + default=8013, + title="Port", + description="The port to run the workcell manager on.", + ) + redis_host: str = Field( + default="localhost", + title="Redis Host", + description="The hostname for the redis server .", + ) + redis_port: int = Field( + default=6379, + title="Redis Port", + description="The port for the redis server.", + ) + redis_password: Union[str, None] = Field( + default=None, + title="Redis Password", + description="The password for the redis server.", + ) + scheduler_update_interval: float = Field( + default=0.1, + title="Scheduler Update Interval", + description="The interval at which the scheduler runs, in seconds.", + ) + node_update_interval: float = Field( + default=1.0, + title="Node Update Interval", + description="The interval at which the workcell queries its node's states, in seconds.", + ) + auto_start: bool = Field( + default=True, + title="Auto Start", + description="Whether the workcell should automatically create a new Workcell Manager and start it when the lab starts, registering it with the Lab Server.", + ) \ No newline at end of file diff --git a/madsci/madsci_common/madsci/common/types/workflow_types.py b/madsci/madsci_common/madsci/common/types/workflow_types.py new file mode 100644 index 0000000..5168777 --- /dev/null +++ b/madsci/madsci_common/madsci/common/types/workflow_types.py @@ -0,0 +1,150 @@ +from datetime import datetime, timedelta +from enum import Enum +from typing import Any, Optional, Union + +from pydantic import Field, 
field_validator + +from madsci.common.types.base_types import BaseModel, new_ulid_str +from madsci.common.types.step_types import Step + +class WorkflowStatus(str, Enum): + """Status for a workflow run""" + + NEW = "new" + """Newly created workflow run, hasn't been queued yet""" + QUEUED = "queued" + """Workflow run is queued, hasn't started yet""" + RUNNING = "running" + """Workflow is currently running a step""" + IN_PROGRESS = "in_progress" + """Workflow run has started, but is not actively running a step""" + PAUSED = "paused" + """Workflow run is paused""" + COMPLETED = "completed" + """Workflow run has completed""" + FAILED = "failed" + """Workflow run has failed""" + UNKNOWN = "unknown" + """Workflow run status is unknown""" + CANCELLED = "cancelled" + """Workflow run has been cancelled""" + + @property + def is_active(self) -> bool: + """Whether or not the workflow run is active""" + return self in [ + WorkflowStatus.NEW, + WorkflowStatus.QUEUED, + WorkflowStatus.RUNNING, + WorkflowStatus.IN_PROGRESS, + WorkflowStatus.PAUSED, + ] + + +class WorkflowParameter(BaseModel): + """container for a workflow parameter""" + + name: str + """the name of the parameter""" + default: Optional[Any] = None + """ the default value of the parameter""" + +class Metadata(BaseModel, extra="allow"): + """Metadata container""" + + author: Optional[str] = None + """Who wrote this object""" + description: Optional[str] = None + """Description of the object""" + version: Union[float, str] = "" + """Version of the object""" + +class WorkflowDefinition(BaseModel): + """Grand container that pulls all info of a workflow together""" + + name: str + """Name of the workflow""" + metadata: Metadata = Field(default_factory=Metadata) + """Information about the flow""" + parameters: Optional[list[WorkflowParameter]] = [] + """Inputs to the workflow""" + flowdef: list[Step] + """User Submitted Steps of the flow""" + + + @field_validator("flowdef", mode="after") + @classmethod + def 
ensure_data_label_uniqueness(cls, v: Any) -> Any: + """Ensure that the names of the arguments and files are unique""" + labels = [] + for step in v: + if step.data_labels: + for key in step.data_labels: + if step.data_labels[key] in labels: + raise ValueError("Data labels must be unique across workflow") + labels.append(step.data_labels[key]) + return v + + + + +class Workflow(WorkflowDefinition): + """Container for a workflow run""" + + label: Optional[str] = None + """Label for the workflow run""" + run_id: str = Field(default_factory=new_ulid_str) + """ID of the workflow run""" + payload: dict[str, Any] = {} + """input information for a given workflow run""" + status: WorkflowStatus = Field(default=WorkflowStatus.NEW) + """current status of the workflow""" + steps: list[Step] = [] + """WEI Processed Steps of the flow""" + experiment_id: str + """ID of the experiment this workflow is a part of""" + step_index: int = 0 + """Index of the current step""" + simulate: bool = False + """Whether or not this workflow is being simulated""" + start_time: Optional[datetime] = None + """Time the workflow started running""" + end_time: Optional[datetime] = None + """Time the workflow finished running""" + duration: Optional[timedelta] = None + """Duration of the workflow's run""" + + def get_step_by_name(self, name: str) -> Step: + """Return the step object by its name""" + for step in self.steps: + if step.name == name: + return step + raise KeyError(f"Step {name} not found in workflow run {self.run_id}") + + def get_step_by_id(self, id: str) -> Step: + """Return the step object indexed by its id""" + for step in self.steps: + if step.id == id: + return step + raise KeyError(f"Step {id} not found in workflow run {self.run_id}") + + def get_datapoint_id_by_label(self, label: str) -> str: + """Return the ID of the first datapoint with the given label in a workflow run""" + for step in self.steps: + if step.result.data: + for key in step.result.data: + if key == label: + return 
step.result.data[key] + raise KeyError(f"Label {label} not found in workflow run {self.run_id}") + + def get_all_datapoint_ids_by_label(self, label: str) -> list[str]: + """Return the IDs of all datapoints with the given label in a workflow run""" + ids = [] + for step in self.steps: + if step.result.data: + for key in step.result.data: + if key == label: + ids.append(step.result.data[key]) + if not ids: + raise KeyError(f"Label {label} not found in workflow run {self.run_id}") + return ids diff --git a/madsci/madsci_common/madsci/common/utils.py b/madsci/madsci_common/madsci/common/utils.py index 975be12..5d3561b 100644 --- a/madsci/madsci_common/madsci/common/utils.py +++ b/madsci/madsci_common/madsci/common/utils.py @@ -1,402 +1,402 @@ -"""Utilities for the MADSci project.""" - -import json -import sys -from pathlib import Path -from typing import Any, Optional - -from pydantic import ValidationError -from pydantic_core._pydantic_core import PydanticUndefined -from rich.console import Console - -from madsci.common.types.base_types import BaseModel, PathLike - -console = Console() - - -def to_snake_case(name: str) -> str: - """Convert a string to snake case. - - Handles conversion from camelCase and PascalCase to snake_case. - """ - import re - - name = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name) - name = re.sub("([a-z0-9])([A-Z])", r"\1_\2", name) - return name.lower().replace(" ", "_").replace("__", "_") - - -def search_for_file_pattern( - pattern: str, - start_dir: Optional[PathLike] = None, - parents: bool = True, - children: bool = True, -) -> list[str]: - """ - Search up and down the file tree for a file(s) matching a pattern. - - Args: - pattern: The pattern to search for. Standard glob patterns are supported. - start_dir: The directory to start the search in. Defaults to the current directory. - parents: Whether to search in parent directories. - children: Whether to search in subdirectories. 
- - Returns: - A list of paths to the files that match the pattern. - """ - - start_dir = Path.cwd() if not start_dir else Path(start_dir).resolve() - - results = [] - if children: - results.extend(Path("./").glob(str(Path("**") / pattern))) - else: - results.extend(Path.glob(str(Path("./") / pattern))) - results.extend(Path("./").glob(pattern)) - if parents: - for parent in start_dir.parents: - results.extend(Path(parent).glob(pattern)) - return results - - -def save_model(path: PathLike, model: BaseModel, overwrite_check: bool = True) -> None: - """Save a MADSci model to a YAML file, optionally with a check to overwrite if the file already exists.""" - try: - model.model_validate(model) - except ValidationError as e: - raise ValueError(f"Validation error while saving model {model}: {e}") from e - if ( - Path(path).exists() - and overwrite_check - and not prompt_yes_no(f"File already exists: {path}. Overwrite?", default="no") - ): - return - model.to_yaml(path) - - -def prompt_yes_no(prompt: str, default: str = "no", quiet: bool = False) -> bool: - """Prompt the user for a yes or no answer.""" - response = str( - prompt_for_input( - rf"{prompt} \[y/n]", - default=default, - required=False, - quiet=quiet, - ), - ).lower() - return response in ["y", "yes", "true"] - - -def prompt_for_input( - prompt: str, - default: Optional[str] = None, - required: bool = False, - quiet: bool = False, -) -> str: - """Prompt the user for input.""" - if quiet or not sys.stdin.isatty(): - if default: - return default - if required: - raise ValueError( - "No input provided and no default value specified for required option.", - ) - return None - if not required: - if default: - response = console.input(f"{prompt} (optional, default: {default}): ") - else: - response = console.input(f"{prompt} (optional): ") - if not response: - response = default - else: - response = None - while not response: - if default: - response = console.input(f"{prompt} (required, default: {default}): ") - if 
not response: - response = default - else: - response = console.input(f"{prompt} (required): ") - return response - - -def new_name_str(prefix: str = "") -> str: - """Generate a new random name string, optionally with a prefix. Make a random combination of an adjective and a noun. Names are not guaranteed to be unique.""" - import random - - adjectives = [ - "happy", - "clever", - "bright", - "swift", - "calm", - "bold", - "eager", - "fair", - "kind", - "proud", - "brave", - "wise", - "quick", - "sharp", - "warm", - "cool", - "fresh", - "keen", - "agile", - "gentle", - "noble", - "merry", - "lively", - "grand", - "smart", - "witty", - "jolly", - "mighty", - "steady", - "pure", - "swift", - "deft", - "sage", - "fleet", - "spry", - "bold", - ] - nouns = [ - "fox", - "owl", - "bear", - "wolf", - "hawk", - "deer", - "lion", - "tiger", - "eagle", - "whale", - "seal", - "dove", - "swan", - "crow", - "duck", - "horse", - "mouse", - "cat", - "lynx", - "puma", - "otter", - "hare", - "raven", - "crane", - "falcon", - "badger", - "marten", - "stoat", - "weasel", - "vole", - "rabbit", - "squirrel", - "raccoon", - "beaver", - "moose", - "elk", - ] - - name = f"{random.choice(adjectives)}_{random.choice(nouns)}" # noqa: S311 - if prefix: - name = f"{prefix}_{name}" - return name - - -def string_to_bool(string: str) -> bool: - """Convert a string to a boolean value.""" - from argparse import ArgumentTypeError - - if string.lower() in ("true", "t", "1", "yes", "y"): - return True - if string.lower() in ("false", "f", "0", "no", "n"): - return False - raise ArgumentTypeError(f"Invalid boolean value: {string}") - - -def prompt_from_list( - prompt: str, - options: list[str], - default: Optional[str] = None, - required: bool = False, - quiet: bool = False, -) -> str: - """Prompt the user for input from a list of options.""" - - # *Print numbered list of options - if not quiet: - for i, option in enumerate(options, 1): - console.print(f"[bold]{i}[/]. 
{option}") - - # *Allow selection by number or exact match - def validate_response(response: str) -> Optional[str]: - if response in options: - return response - try: - idx = int(response) - if 1 <= idx <= len(options): - return options[idx - 1] - except ValueError: - pass - return None - - while True: - try: - response = validate_response( - prompt_for_input( - prompt, - default=default, - required=required, - quiet=quiet, - ), - ) - except ValueError: - continue - else: - break - return response - - -def prompt_from_pydantic_model(model: BaseModel, prompt: str, **kwargs: Any) -> str: - """Prompt the user for input from a pydantic model. - - Args: - model: The pydantic model to prompt for - prompt: The prompt to display - **kwargs: Pre-filled values to skip prompting for - - Returns: - A dictionary of field values for the model - """ - result = {} - - # Print header for the prompts - console.print(f"\n[bold]{prompt}[/]") - - for field_name, field in model.model_fields.items(): - # Skip if value provided in kwargs - if field_name in kwargs: - result[field_name] = kwargs[field_name] - continue - - # Build field prompt - field_prompt = f"{field.title or field_name}" - - # Add type hint - type_hint = str(field.annotation).replace("typing.", "") - field_prompt += f" ({type_hint})" - - # Add description if available - if field.description: - field_prompt += f"\n{field.description}" - - # Handle basic fields - while True: - try: - response = prompt_for_input( - field_prompt, - default=field.default - if field.default != PydanticUndefined - else None, - required=field.is_required, - ) - if isinstance(response, str): - response = json.loads(response) - result[field_name] = response - except json.JSONDecodeError as e: - console.print( - f"[bold red]Invalid JSON input for field {field_name}: {e}[/]", - ) - continue - else: - break - - return result - - -def relative_path(source: Path, target: Path, walk_up: bool = True) -> Path: - """ - "Backport" of 
:meth:`pathlib.Path.relative_to` with ``walk_up=True`` - that's not available pre 3.12. - - Return the relative path to another path identified by the passed - arguments. If the operation is not possible (because this is not - related to the other path), raise ValueError. - - The *walk_up* parameter controls whether `..` may be used to resolve - the path. - - References: - https://github.com/python/cpython/blob/8a2baedc4bcb606da937e4e066b4b3a18961cace/Lib/pathlib/_abc.py#L244-L270 - Credit: https://github.com/p2p-ld/numpydantic/blob/66fffc49f87bfaaa2f4d05bf1730c343b10c9cc6/src/numpydantic/serialization.py#L107 - """ - if not isinstance(source, Path): - source = Path(source) - target_parts = target.parts - source_parts = source.parts - anchor0, parts0 = target_parts[0], list(reversed(target_parts[1:])) - anchor1, parts1 = source_parts[0], list(reversed(source_parts[1:])) - if anchor0 != anchor1: - raise ValueError(f"{target!r} and {source!r} have different anchors") - while parts0 and parts1 and parts0[-1] == parts1[-1]: - parts0.pop() - parts1.pop() - for part in parts1: - if not part or part == ".": - pass - elif not walk_up: - raise ValueError(f"{target!r} is not in the subpath of {source!r}") - elif part == "..": - raise ValueError(f"'..' segment in {source!r} cannot be walked") - else: - parts0.append("..") - return Path(*reversed(parts0)) - - -def threaded_task(func: callable) -> callable: - """Mark a function as a threaded task, to be run without awaiting. Returns the thread object, so you _can_ await if needed.""" - - import functools - import threading - - @functools.wraps(func) - def wrapper(*args: Any, **kwargs: Any) -> threading.Thread: - thread = threading.Thread(target=func, args=args, kwargs=kwargs) - thread.start() - return thread - - return wrapper - - -def threaded_daemon(func: callable) -> callable: - """Mark a function as a threaded daemon, to be run without awaiting. 
Returns the thread object, so you _can_ await if needed, and stops when the calling thread terminates.""" - - import functools - import threading - - @functools.wraps(func) - def wrapper(*args: Any, **kwargs: Any) -> threading.Thread: - thread = threading.Thread(target=func, args=args, kwargs=kwargs) - thread.daemon = True - thread.start() - return thread - - return wrapper - - -def pretty_type_repr(type_hint: Any) -> str: - """Returns a pretty string representation of a type hint, including subtypes.""" - type_name = type_hint.__name__ - if ( - "__args__" in dir(type_hint) and type_hint.__args__ - ): # * If the type has subtype info - type_name += "[" - for subtype in type_hint.__args__: - type_name += pretty_type_repr(subtype) - type_name += "]" - return type_name +"""Utilities for the MADSci project.""" + +import json +import sys +from pathlib import Path +from typing import Any, Optional + +from pydantic import ValidationError +from pydantic_core._pydantic_core import PydanticUndefined +from rich.console import Console + +from madsci.common.types.base_types import BaseModel, PathLike + +console = Console() + + +def to_snake_case(name: str) -> str: + """Convert a string to snake case. + + Handles conversion from camelCase and PascalCase to snake_case. + """ + import re + + name = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name) + name = re.sub("([a-z0-9])([A-Z])", r"\1_\2", name) + return name.lower().replace(" ", "_").replace("__", "_") + + +def search_for_file_pattern( + pattern: str, + start_dir: Optional[PathLike] = None, + parents: bool = True, + children: bool = True, +) -> list[str]: + """ + Search up and down the file tree for a file(s) matching a pattern. + + Args: + pattern: The pattern to search for. Standard glob patterns are supported. + start_dir: The directory to start the search in. Defaults to the current directory. + parents: Whether to search in parent directories. + children: Whether to search in subdirectories. 
+ + Returns: + A list of paths to the files that match the pattern. + """ + + start_dir = Path.cwd() if not start_dir else Path(start_dir).resolve() + + results = [] + if children: + results.extend(Path("./").glob(str(Path("**") / pattern))) + else: + results.extend(Path.glob(str(Path("./") / pattern))) + results.extend(Path("./").glob(pattern)) + if parents: + for parent in start_dir.parents: + results.extend(Path(parent).glob(pattern)) + return results + + +def save_model(path: PathLike, model: BaseModel, overwrite_check: bool = True) -> None: + """Save a MADSci model to a YAML file, optionally with a check to overwrite if the file already exists.""" + try: + model.model_validate(model) + except ValidationError as e: + raise ValueError(f"Validation error while saving model {model}: {e}") from e + if ( + Path(path).exists() + and overwrite_check + and not prompt_yes_no(f"File already exists: {path}. Overwrite?", default="no") + ): + return + model.to_yaml(path) + + +def prompt_yes_no(prompt: str, default: str = "no", quiet: bool = False) -> bool: + """Prompt the user for a yes or no answer.""" + response = str( + prompt_for_input( + rf"{prompt} \[y/n]", + default=default, + required=False, + quiet=quiet, + ), + ).lower() + return response in ["y", "yes", "true"] + + +def prompt_for_input( + prompt: str, + default: Optional[str] = None, + required: bool = False, + quiet: bool = False, +) -> str: + """Prompt the user for input.""" + if quiet or not sys.stdin.isatty(): + if default: + return default + if required: + raise ValueError( + "No input provided and no default value specified for required option.", + ) + return None + if not required: + if default: + response = console.input(f"{prompt} (optional, default: {default}): ") + else: + response = console.input(f"{prompt} (optional): ") + if not response: + response = default + else: + response = None + while not response: + if default: + response = console.input(f"{prompt} (required, default: {default}): ") + if 
not response: + response = default + else: + response = console.input(f"{prompt} (required): ") + return response + + +def new_name_str(prefix: str = "") -> str: + """Generate a new random name string, optionally with a prefix. Make a random combination of an adjective and a noun. Names are not guaranteed to be unique.""" + import random + + adjectives = [ + "happy", + "clever", + "bright", + "swift", + "calm", + "bold", + "eager", + "fair", + "kind", + "proud", + "brave", + "wise", + "quick", + "sharp", + "warm", + "cool", + "fresh", + "keen", + "agile", + "gentle", + "noble", + "merry", + "lively", + "grand", + "smart", + "witty", + "jolly", + "mighty", + "steady", + "pure", + "swift", + "deft", + "sage", + "fleet", + "spry", + "bold", + ] + nouns = [ + "fox", + "owl", + "bear", + "wolf", + "hawk", + "deer", + "lion", + "tiger", + "eagle", + "whale", + "seal", + "dove", + "swan", + "crow", + "duck", + "horse", + "mouse", + "cat", + "lynx", + "puma", + "otter", + "hare", + "raven", + "crane", + "falcon", + "badger", + "marten", + "stoat", + "weasel", + "vole", + "rabbit", + "squirrel", + "raccoon", + "beaver", + "moose", + "elk", + ] + + name = f"{random.choice(adjectives)}_{random.choice(nouns)}" # noqa: S311 + if prefix: + name = f"{prefix}_{name}" + return name + + +def string_to_bool(string: str) -> bool: + """Convert a string to a boolean value.""" + from argparse import ArgumentTypeError + + if string.lower() in ("true", "t", "1", "yes", "y"): + return True + if string.lower() in ("false", "f", "0", "no", "n"): + return False + raise ArgumentTypeError(f"Invalid boolean value: {string}") + + +def prompt_from_list( + prompt: str, + options: list[str], + default: Optional[str] = None, + required: bool = False, + quiet: bool = False, +) -> str: + """Prompt the user for input from a list of options.""" + + # *Print numbered list of options + if not quiet: + for i, option in enumerate(options, 1): + console.print(f"[bold]{i}[/]. 
{option}") + + # *Allow selection by number or exact match + def validate_response(response: str) -> Optional[str]: + if response in options: + return response + try: + idx = int(response) + if 1 <= idx <= len(options): + return options[idx - 1] + except ValueError: + pass + return None + + while True: + try: + response = validate_response( + prompt_for_input( + prompt, + default=default, + required=required, + quiet=quiet, + ), + ) + except ValueError: + continue + else: + break + return response + + +def prompt_from_pydantic_model(model: BaseModel, prompt: str, **kwargs: Any) -> str: + """Prompt the user for input from a pydantic model. + + Args: + model: The pydantic model to prompt for + prompt: The prompt to display + **kwargs: Pre-filled values to skip prompting for + + Returns: + A dictionary of field values for the model + """ + result = {} + + # Print header for the prompts + console.print(f"\n[bold]{prompt}[/]") + + for field_name, field in model.model_fields.items(): + # Skip if value provided in kwargs + if field_name in kwargs: + result[field_name] = kwargs[field_name] + continue + + # Build field prompt + field_prompt = f"{field.title or field_name}" + + # Add type hint + type_hint = str(field.annotation).replace("typing.", "") + field_prompt += f" ({type_hint})" + + # Add description if available + if field.description: + field_prompt += f"\n{field.description}" + + # Handle basic fields + while True: + try: + response = prompt_for_input( + field_prompt, + default=field.default + if field.default != PydanticUndefined + else None, + required=field.is_required, + ) + if isinstance(response, str): + response = json.loads(response) + result[field_name] = response + except json.JSONDecodeError as e: + console.print( + f"[bold red]Invalid JSON input for field {field_name}: {e}[/]", + ) + continue + else: + break + + return result + + +def relative_path(source: Path, target: Path, walk_up: bool = True) -> Path: + """ + "Backport" of 
:meth:`pathlib.Path.relative_to` with ``walk_up=True`` + that's not available pre 3.12. + + Return the relative path to another path identified by the passed + arguments. If the operation is not possible (because this is not + related to the other path), raise ValueError. + + The *walk_up* parameter controls whether `..` may be used to resolve + the path. + + References: + https://github.com/python/cpython/blob/8a2baedc4bcb606da937e4e066b4b3a18961cace/Lib/pathlib/_abc.py#L244-L270 + Credit: https://github.com/p2p-ld/numpydantic/blob/66fffc49f87bfaaa2f4d05bf1730c343b10c9cc6/src/numpydantic/serialization.py#L107 + """ + if not isinstance(source, Path): + source = Path(source) + target_parts = target.parts + source_parts = source.parts + anchor0, parts0 = target_parts[0], list(reversed(target_parts[1:])) + anchor1, parts1 = source_parts[0], list(reversed(source_parts[1:])) + if anchor0 != anchor1: + raise ValueError(f"{target!r} and {source!r} have different anchors") + while parts0 and parts1 and parts0[-1] == parts1[-1]: + parts0.pop() + parts1.pop() + for part in parts1: + if not part or part == ".": + pass + elif not walk_up: + raise ValueError(f"{target!r} is not in the subpath of {source!r}") + elif part == "..": + raise ValueError(f"'..' segment in {source!r} cannot be walked") + else: + parts0.append("..") + return Path(*reversed(parts0)) + + +def threaded_task(func: callable) -> callable: + """Mark a function as a threaded task, to be run without awaiting. Returns the thread object, so you _can_ await if needed.""" + + import functools + import threading + + @functools.wraps(func) + def wrapper(*args: Any, **kwargs: Any) -> threading.Thread: + thread = threading.Thread(target=func, args=args, kwargs=kwargs) + thread.start() + return thread + + return wrapper + + +def threaded_daemon(func: callable) -> callable: + """Mark a function as a threaded daemon, to be run without awaiting. 
Returns the thread object, so you _can_ await if needed, and stops when the calling thread terminates.""" + + import functools + import threading + + @functools.wraps(func) + def wrapper(*args: Any, **kwargs: Any) -> threading.Thread: + thread = threading.Thread(target=func, args=args, kwargs=kwargs) + thread.daemon = True + thread.start() + return thread + + return wrapper + + +def pretty_type_repr(type_hint: Any) -> str: + """Returns a pretty string representation of a type hint, including subtypes.""" + type_name = type_hint.__name__ + if ( + "__args__" in dir(type_hint) and type_hint.__args__ + ): # * If the type has subtype info + type_name += "[" + for subtype in type_hint.__args__: + type_name += pretty_type_repr(subtype) + type_name += "]" + return type_name diff --git a/madsci/madsci_common/pdm.lock b/madsci/madsci_common/pdm.lock index 3034e76..337af48 100644 --- a/madsci/madsci_common/pdm.lock +++ b/madsci/madsci_common/pdm.lock @@ -1,1562 +1,1562 @@ -# This file is @generated by PDM. -# It is not intended for manual editing. 
- -[metadata] -groups = ["default", "server", "tests"] -strategy = ["inherit_metadata"] -lock_version = "4.5.0" -content_hash = "sha256:48620a5f03867071175c529837f308437d8b523a041a4834e63223a3b8bed639" - -[[metadata.targets]] -requires_python = ">=3.9.1" - -[[package]] -name = "aenum" -version = "3.1.15" -summary = "Advanced Enumerations (compatible with Python's stdlib Enum), NamedTuples, and NamedConstants" -groups = ["default"] -files = [ - {file = "aenum-3.1.15-py3-none-any.whl", hash = "sha256:e0dfaeea4c2bd362144b87377e2c61d91958c5ed0b4daf89cb6f45ae23af6288"}, - {file = "aenum-3.1.15.tar.gz", hash = "sha256:8cbd76cd18c4f870ff39b24284d3ea028fbe8731a58df3aa581e434c575b9559"}, -] - -[[package]] -name = "annotated-types" -version = "0.7.0" -requires_python = ">=3.8" -summary = "Reusable constraint types to use with typing.Annotated" -groups = ["default", "server"] -dependencies = [ - "typing-extensions>=4.0.0; python_version < \"3.9\"", -] -files = [ - {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, - {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, -] - -[[package]] -name = "anyio" -version = "4.6.2.post1" -requires_python = ">=3.9" -summary = "High level compatibility layer for multiple asynchronous event loop implementations" -groups = ["server"] -dependencies = [ - "exceptiongroup>=1.0.2; python_version < \"3.11\"", - "idna>=2.8", - "sniffio>=1.1", - "typing-extensions>=4.1; python_version < \"3.11\"", -] -files = [ - {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, - {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, -] - -[[package]] -name = "appnope" -version = "0.1.4" -requires_python = ">=3.6" -summary = "Disable App Nap on macOS >= 10.9" -groups = 
["tests"] -marker = "platform_system == \"Darwin\"" -files = [ - {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, - {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, -] - -[[package]] -name = "asttokens" -version = "2.4.1" -summary = "Annotate AST trees with source code positions" -groups = ["tests"] -dependencies = [ - "six>=1.12.0", - "typing; python_version < \"3.5\"", -] -files = [ - {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, - {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, -] - -[[package]] -name = "certifi" -version = "2024.8.30" -requires_python = ">=3.6" -summary = "Python package for providing Mozilla's CA Bundle." -groups = ["default"] -files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, -] - -[[package]] -name = "cffi" -version = "1.17.1" -requires_python = ">=3.8" -summary = "Foreign Function Interface for Python calling C code." 
-groups = ["tests"] -marker = "implementation_name == \"pypy\"" -dependencies = [ - "pycparser", -] -files = [ - {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, - {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, - {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, - {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, - {file = 
"cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, - {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, - {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, - {file = 
"cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, - {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, - {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, - {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, - {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, - {file = 
"cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, - {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, - {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, - {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.4.0" -requires_python = ">=3.7.0" -summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-groups = ["default"] -files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, - {file = 
"charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = 
"charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", 
hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, -] - -[[package]] -name = "click" -version = "8.1.7" -requires_python = ">=3.7" -summary = "Composable command line interface toolkit" -groups = ["server"] -dependencies = [ - "colorama; platform_system == \"Windows\"", - "importlib-metadata; python_version < \"3.8\"", -] -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -summary = "Cross-platform colored terminal text." 
-groups = ["server", "tests"] -marker = "sys_platform == \"win32\" or platform_system == \"Windows\"" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "comm" -version = "0.2.2" -requires_python = ">=3.8" -summary = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." -groups = ["tests"] -dependencies = [ - "traitlets>=4", -] -files = [ - {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, - {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, -] - -[[package]] -name = "debugpy" -version = "1.8.8" -requires_python = ">=3.8" -summary = "An implementation of the Debug Adapter Protocol for Python" -groups = ["tests"] -files = [ - {file = "debugpy-1.8.8-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:e59b1607c51b71545cb3496876544f7186a7a27c00b436a62f285603cc68d1c6"}, - {file = "debugpy-1.8.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6531d952b565b7cb2fbd1ef5df3d333cf160b44f37547a4e7cf73666aca5d8d"}, - {file = "debugpy-1.8.8-cp310-cp310-win32.whl", hash = "sha256:b01f4a5e5c5fb1d34f4ccba99a20ed01eabc45a4684f4948b5db17a319dfb23f"}, - {file = "debugpy-1.8.8-cp310-cp310-win_amd64.whl", hash = "sha256:535f4fb1c024ddca5913bb0eb17880c8f24ba28aa2c225059db145ee557035e9"}, - {file = "debugpy-1.8.8-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:c399023146e40ae373753a58d1be0a98bf6397fadc737b97ad612886b53df318"}, - {file = "debugpy-1.8.8-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:09cc7b162586ea2171eea055985da2702b0723f6f907a423c9b2da5996ad67ba"}, - {file = "debugpy-1.8.8-cp311-cp311-win32.whl", hash = "sha256:eea8821d998ebeb02f0625dd0d76839ddde8cbf8152ebbe289dd7acf2cdc6b98"}, - {file = "debugpy-1.8.8-cp311-cp311-win_amd64.whl", hash = "sha256:d4483836da2a533f4b1454dffc9f668096ac0433de855f0c22cdce8c9f7e10c4"}, - {file = "debugpy-1.8.8-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:0cc94186340be87b9ac5a707184ec8f36547fb66636d1029ff4f1cc020e53996"}, - {file = "debugpy-1.8.8-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64674e95916e53c2e9540a056e5f489e0ad4872645399d778f7c598eacb7b7f9"}, - {file = "debugpy-1.8.8-cp312-cp312-win32.whl", hash = "sha256:5c6e885dbf12015aed73770f29dec7023cb310d0dc2ba8bfbeb5c8e43f80edc9"}, - {file = "debugpy-1.8.8-cp312-cp312-win_amd64.whl", hash = "sha256:19ffbd84e757a6ca0113574d1bf5a2298b3947320a3e9d7d8dc3377f02d9f864"}, - {file = "debugpy-1.8.8-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:705cd123a773d184860ed8dae99becd879dfec361098edbefb5fc0d3683eb804"}, - {file = "debugpy-1.8.8-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890fd16803f50aa9cb1a9b9b25b5ec321656dd6b78157c74283de241993d086f"}, - {file = "debugpy-1.8.8-cp313-cp313-win32.whl", hash = "sha256:90244598214bbe704aa47556ec591d2f9869ff9e042e301a2859c57106649add"}, - {file = "debugpy-1.8.8-cp313-cp313-win_amd64.whl", hash = "sha256:4b93e4832fd4a759a0c465c967214ed0c8a6e8914bced63a28ddb0dd8c5f078b"}, - {file = "debugpy-1.8.8-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:53709d4ec586b525724819dc6af1a7703502f7e06f34ded7157f7b1f963bb854"}, - {file = "debugpy-1.8.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a9c013077a3a0000e83d97cf9cc9328d2b0bbb31f56b0e99ea3662d29d7a6a2"}, - {file = "debugpy-1.8.8-cp39-cp39-win32.whl", hash = 
"sha256:ffe94dd5e9a6739a75f0b85316dc185560db3e97afa6b215628d1b6a17561cb2"}, - {file = "debugpy-1.8.8-cp39-cp39-win_amd64.whl", hash = "sha256:5c0e5a38c7f9b481bf31277d2f74d2109292179081f11108e668195ef926c0f9"}, - {file = "debugpy-1.8.8-py2.py3-none-any.whl", hash = "sha256:ec684553aba5b4066d4de510859922419febc710df7bba04fe9e7ef3de15d34f"}, - {file = "debugpy-1.8.8.zip", hash = "sha256:e6355385db85cbd666be703a96ab7351bc9e6c61d694893206f8001e22aee091"}, -] - -[[package]] -name = "decorator" -version = "5.1.1" -requires_python = ">=3.5" -summary = "Decorators for Humans" -groups = ["tests"] -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.2" -requires_python = ">=3.7" -summary = "Backport of PEP 654 (exception groups)" -groups = ["server", "tests"] -marker = "python_version < \"3.11\"" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[[package]] -name = "executing" -version = "2.1.0" -requires_python = ">=3.8" -summary = "Get the currently executing AST node of a frame, and other information" -groups = ["tests"] -files = [ - {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, - {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, -] - -[[package]] -name = "fastapi" -version = "0.115.4" -requires_python = ">=3.8" -summary = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" -groups = ["server"] 
-dependencies = [ - "pydantic!=1.8,!=1.8.1,!=2.0.0,!=2.0.1,!=2.1.0,<3.0.0,>=1.7.4", - "starlette<0.42.0,>=0.40.0", - "typing-extensions>=4.8.0", -] -files = [ - {file = "fastapi-0.115.4-py3-none-any.whl", hash = "sha256:0b504a063ffb3cf96a5e27dc1bc32c80ca743a2528574f9cdc77daa2d31b4742"}, - {file = "fastapi-0.115.4.tar.gz", hash = "sha256:db653475586b091cb8b2fec2ac54a680ac6a158e07406e1abae31679e8826349"}, -] - -[[package]] -name = "greenlet" -version = "3.1.1" -requires_python = ">=3.7" -summary = "Lightweight in-process concurrent programming" -groups = ["default"] -marker = "(platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"" -files = [ - {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, - {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, - {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, - {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, - {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, - {file = 
"greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, - {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, - {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, - {file = 
"greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, - {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, - {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = 
"sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, - {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, - {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, - {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, -] - -[[package]] -name = "h11" -version = "0.14.0" -requires_python = ">=3.7" -summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -groups = ["server"] -dependencies = [ - "typing-extensions; python_version < \"3.8\"", -] -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = 
"h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "httptools" -version = "0.6.4" -requires_python = ">=3.8.0" -summary = "A collection of framework independent HTTP protocol utils." -groups = ["server"] -files = [ - {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, - {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, - {file = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}, - {file = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}, - {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}, - {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}, - {file = "httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}, - {file = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}, - {file = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}, - {file = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}, - {file = 
"httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}, - {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}, - {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}, - {file = "httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}, - {file = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}, - {file = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}, - {file = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}, - {file = "httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}, - {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}, - {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}, - {file = "httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}, - {file = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}, - {file = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}, - {file = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}, - {file = "httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}, - {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}, - {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}, - {file = "httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}, - {file = "httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003"}, - {file = "httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab"}, - {file = "httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547"}, - {file = "httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9"}, - {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076"}, - {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd"}, - {file = "httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6"}, 
- {file = "httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}, -] - -[[package]] -name = "idna" -version = "3.10" -requires_python = ">=3.6" -summary = "Internationalized Domain Names in Applications (IDNA)" -groups = ["default", "server"] -files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, -] - -[[package]] -name = "importlib-metadata" -version = "8.5.0" -requires_python = ">=3.8" -summary = "Read metadata from Python packages" -groups = ["tests"] -marker = "python_version < \"3.10\"" -dependencies = [ - "typing-extensions>=3.6.4; python_version < \"3.8\"", - "zipp>=3.20", -] -files = [ - {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, - {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, -] - -[[package]] -name = "ipykernel" -version = "6.29.5" -requires_python = ">=3.8" -summary = "IPython Kernel for Jupyter" -groups = ["tests"] -dependencies = [ - "appnope; platform_system == \"Darwin\"", - "comm>=0.1.1", - "debugpy>=1.6.5", - "ipython>=7.23.1", - "jupyter-client>=6.1.12", - "jupyter-core!=5.0.*,>=4.12", - "matplotlib-inline>=0.1", - "nest-asyncio", - "packaging", - "psutil", - "pyzmq>=24", - "tornado>=6.1", - "traitlets>=5.4.0", -] -files = [ - {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, - {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, -] - -[[package]] -name = "ipython" -version = "8.18.1" -requires_python = ">=3.9" -summary = "IPython: Productive Interactive Computing" -groups = ["tests"] 
-dependencies = [ - "colorama; sys_platform == \"win32\"", - "decorator", - "exceptiongroup; python_version < \"3.11\"", - "jedi>=0.16", - "matplotlib-inline", - "pexpect>4.3; sys_platform != \"win32\"", - "prompt-toolkit<3.1.0,>=3.0.41", - "pygments>=2.4.0", - "stack-data", - "traitlets>=5", - "typing-extensions; python_version < \"3.10\"", -] -files = [ - {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, - {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, -] - -[[package]] -name = "jedi" -version = "0.19.2" -requires_python = ">=3.6" -summary = "An autocompletion tool for Python that can be used for text editors." -groups = ["tests"] -dependencies = [ - "parso<0.9.0,>=0.8.4", -] -files = [ - {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, - {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, -] - -[[package]] -name = "jupyter-client" -version = "8.6.3" -requires_python = ">=3.8" -summary = "Jupyter protocol implementation and client libraries" -groups = ["tests"] -dependencies = [ - "importlib-metadata>=4.8.3; python_version < \"3.10\"", - "jupyter-core!=5.0.*,>=4.12", - "python-dateutil>=2.8.2", - "pyzmq>=23.0", - "tornado>=6.2", - "traitlets>=5.3", -] -files = [ - {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"}, - {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"}, -] - -[[package]] -name = "jupyter-core" -version = "5.7.2" -requires_python = ">=3.8" -summary = "Jupyter core package. A base package on which Jupyter projects rely." 
-groups = ["tests"] -dependencies = [ - "platformdirs>=2.5", - "pywin32>=300; sys_platform == \"win32\" and platform_python_implementation != \"PyPy\"", - "traitlets>=5.3", -] -files = [ - {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, - {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, -] - -[[package]] -name = "matplotlib-inline" -version = "0.1.7" -requires_python = ">=3.8" -summary = "Inline Matplotlib backend for Jupyter" -groups = ["tests"] -dependencies = [ - "traitlets", -] -files = [ - {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, - {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, -] - -[[package]] -name = "nest-asyncio" -version = "1.6.0" -requires_python = ">=3.5" -summary = "Patch asyncio to allow nested event loops" -groups = ["tests"] -files = [ - {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, - {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, -] - -[[package]] -name = "packaging" -version = "24.2" -requires_python = ">=3.8" -summary = "Core utilities for Python packages" -groups = ["tests"] -files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, -] - -[[package]] -name = "parso" -version = "0.8.4" -requires_python = ">=3.6" -summary = "A Python Parser" -groups = ["tests"] -files = [ - {file = "parso-0.8.4-py2.py3-none-any.whl", hash = 
"sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, - {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, -] - -[[package]] -name = "pexpect" -version = "4.9.0" -summary = "Pexpect allows easy control of interactive console applications." -groups = ["tests"] -marker = "sys_platform != \"win32\"" -dependencies = [ - "ptyprocess>=0.5", -] -files = [ - {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, - {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, -] - -[[package]] -name = "platformdirs" -version = "4.3.6" -requires_python = ">=3.8" -summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -groups = ["tests"] -files = [ - {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, - {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, -] - -[[package]] -name = "prompt-toolkit" -version = "3.0.48" -requires_python = ">=3.7.0" -summary = "Library for building powerful interactive command lines in Python" -groups = ["tests"] -dependencies = [ - "wcwidth", -] -files = [ - {file = "prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"}, - {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"}, -] - -[[package]] -name = "psutil" -version = "6.1.0" -requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -summary = "Cross-platform lib for process and system monitoring in Python." 
-groups = ["tests"] -files = [ - {file = "psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688"}, - {file = "psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e"}, - {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38"}, - {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b"}, - {file = "psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a"}, - {file = "psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e"}, - {file = "psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be"}, - {file = "psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a"}, -] - -[[package]] -name = "ptyprocess" -version = "0.7.0" -summary = "Run a subprocess in a pseudo terminal" -groups = ["tests"] -marker = "sys_platform != \"win32\"" -files = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] - -[[package]] -name = "pure-eval" -version = "0.2.3" -summary = "Safely evaluate AST nodes without side effects" -groups = ["tests"] -files = [ - {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, - {file = 
"pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, -] - -[[package]] -name = "pycparser" -version = "2.22" -requires_python = ">=3.8" -summary = "C parser in Python" -groups = ["tests"] -marker = "implementation_name == \"pypy\"" -files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, -] - -[[package]] -name = "pydantic" -version = "2.9.2" -requires_python = ">=3.8" -summary = "Data validation using Python type hints" -groups = ["default", "server"] -dependencies = [ - "annotated-types>=0.6.0", - "pydantic-core==2.23.4", - "typing-extensions>=4.12.2; python_version >= \"3.13\"", - "typing-extensions>=4.6.1; python_version < \"3.13\"", -] -files = [ - {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, - {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, -] - -[[package]] -name = "pydantic-core" -version = "2.23.4" -requires_python = ">=3.8" -summary = "Core functionality for Pydantic validation and serialization" -groups = ["default", "server"] -dependencies = [ - "typing-extensions!=4.7.0,>=4.6.0", -] -files = [ - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, - {file = 
"pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, - {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, - {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, - {file = 
"pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, - {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, - {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, - {file = 
"pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, - {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, - {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, - {file = 
"pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, - {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, - {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, - {file = 
"pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, - {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, - {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, - {file = 
"pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, - {file 
= "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, - {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, -] - -[[package]] -name = "pygments" -version = "2.18.0" -requires_python = ">=3.8" -summary = "Pygments is a syntax highlighting package written in Python." -groups = ["tests"] -files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, -] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -summary = "Extensions to the standard Python datetime module" -groups = ["tests"] -dependencies = [ - "six>=1.5", -] -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[[package]] -name = "python-dotenv" -version = "1.0.1" -requires_python = ">=3.8" -summary = "Read key-value pairs from a .env file and set them as environment variables" -groups = ["default", "server"] -files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, -] - -[[package]] -name = "python-multipart" -version = "0.0.17" -requires_python = ">=3.8" -summary = "A streaming multipart parser for Python" -groups = ["server"] -files = [ - {file = "python_multipart-0.0.17-py3-none-any.whl", hash = 
"sha256:15dc4f487e0a9476cc1201261188ee0940165cffc94429b6fc565c4d3045cb5d"}, - {file = "python_multipart-0.0.17.tar.gz", hash = "sha256:41330d831cae6e2f22902704ead2826ea038d0419530eadff3ea80175aec5538"}, -] - -[[package]] -name = "python-ulid" -version = "3.0.0" -requires_python = ">=3.9" -summary = "Universally unique lexicographically sortable identifier" -groups = ["default"] -files = [ - {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, - {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, -] - -[[package]] -name = "python-ulid" -version = "3.0.0" -extras = ["pydantic"] -requires_python = ">=3.9" -summary = "Universally unique lexicographically sortable identifier" -groups = ["default"] -dependencies = [ - "pydantic>=2.0", - "python-ulid==3.0.0", -] -files = [ - {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, - {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, -] - -[[package]] -name = "pywin32" -version = "308" -summary = "Python for Window Extensions" -groups = ["tests"] -marker = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\"" -files = [ - {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, - {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, - {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, - {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, - {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = 
"sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, - {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, - {file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, - {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, - {file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, - {file = "pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, - {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, - {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, - {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, - {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -requires_python = ">=3.8" -summary = "YAML parser and emitter for Python" -groups = ["default", "server"] -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = 
"sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, -] - -[[package]] -name = "pyzmq" -version = "26.2.0" -requires_python = ">=3.7" -summary = "Python bindings for 0MQ" -groups = ["tests"] -dependencies = [ - "cffi; implementation_name == \"pypy\"", -] -files = [ - {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629"}, - {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b"}, - {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764"}, - {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c"}, - {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a"}, - {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88"}, - {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f"}, - {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282"}, - {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea"}, - {file = "pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2"}, - {file = "pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971"}, - {file = "pyzmq-26.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa"}, - {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218"}, - {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4"}, - {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef"}, - {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317"}, - {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf"}, - {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e"}, - {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37"}, - {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3"}, - {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6"}, - {file = "pyzmq-26.2.0-cp311-cp311-win32.whl", hash = "sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4"}, - {file = 
"pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5"}, - {file = "pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003"}, - {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9"}, - {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52"}, - {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08"}, - {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5"}, - {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae"}, - {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711"}, - {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6"}, - {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3"}, - {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b"}, - {file = "pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7"}, - {file = "pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a"}, - {file = "pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = 
"sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b"}, - {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726"}, - {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3"}, - {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50"}, - {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb"}, - {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187"}, - {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b"}, - {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18"}, - {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115"}, - {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e"}, - {file = "pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5"}, - {file = "pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad"}, - {file = "pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797"}, - {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a"}, - {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc"}, - {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5"}, - {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672"}, - {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797"}, - {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386"}, - {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306"}, - {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6"}, - {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0"}, - {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:b1d464cb8d72bfc1a3adc53305a63a8e0cac6bc8c5a07e8ca190ab8d3faa43c2"}, - {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4da04c48873a6abdd71811c5e163bd656ee1b957971db7f35140a2d573f6949c"}, - {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d049df610ac811dcffdc147153b414147428567fbbc8be43bb8885f04db39d98"}, - {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05590cdbc6b902101d0e65d6a4780af14dc22914cc6ab995d99b85af45362cc9"}, - {file = 
"pyzmq-26.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c811cfcd6a9bf680236c40c6f617187515269ab2912f3d7e8c0174898e2519db"}, - {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6835dd60355593de10350394242b5757fbbd88b25287314316f266e24c61d073"}, - {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc6bee759a6bddea5db78d7dcd609397449cb2d2d6587f48f3ca613b19410cfc"}, - {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c530e1eecd036ecc83c3407f77bb86feb79916d4a33d11394b8234f3bd35b940"}, - {file = "pyzmq-26.2.0-cp39-cp39-win32.whl", hash = "sha256:367b4f689786fca726ef7a6c5ba606958b145b9340a5e4808132cc65759abd44"}, - {file = "pyzmq-26.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e6fa2e3e683f34aea77de8112f6483803c96a44fd726d7358b9888ae5bb394ec"}, - {file = "pyzmq-26.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:7445be39143a8aa4faec43b076e06944b8f9d0701b669df4af200531b21e40bb"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca"}, - {file = "pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cdeabcff45d1c219636ee2e54d852262e5c2e085d6cb476d938aee8d921356b3"}, - {file = 
"pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35cffef589bcdc587d06f9149f8d5e9e8859920a071df5a2671de2213bef592a"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18c8dc3b7468d8b4bdf60ce9d7141897da103c7a4690157b32b60acb45e333e6"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7133d0a1677aec369d67dd78520d3fa96dd7f3dcec99d66c1762870e5ea1a50a"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a96179a24b14fa6428cbfc08641c779a53f8fcec43644030328f44034c7f1f4"}, - {file = "pyzmq-26.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4f78c88905461a9203eac9faac157a2a0dbba84a0fd09fd29315db27be40af9f"}, - {file = "pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f"}, -] - -[[package]] -name = "requests" -version = "2.32.3" -requires_python = ">=3.8" -summary = "Python HTTP for Humans." 
-groups = ["default"] -dependencies = [ - "certifi>=2017.4.17", - "charset-normalizer<4,>=2", - "idna<4,>=2.5", - "urllib3<3,>=1.21.1", -] -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[[package]] -name = "six" -version = "1.16.0" -requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -summary = "Python 2 and 3 compatibility utilities" -groups = ["tests"] -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -requires_python = ">=3.7" -summary = "Sniff out which async library your code is running under" -groups = ["server"] -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "sqlalchemy" -version = "2.0.36" -requires_python = ">=3.7" -summary = "Database Abstraction Library" -groups = ["default"] -dependencies = [ - "greenlet!=0.4.17; (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"", - "importlib-metadata; python_version < \"3.8\"", - "typing-extensions>=4.6.0", -] -files = [ - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, - {file = 
"SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = 
"sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, - {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, - {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, -] - -[[package]] -name = "sqlmodel" -version = "0.0.22" -requires_python = ">=3.7" -summary = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness." -groups = ["default"] -dependencies = [ - "SQLAlchemy<2.1.0,>=2.0.14", - "pydantic<3.0.0,>=1.10.13", -] -files = [ - {file = "sqlmodel-0.0.22-py3-none-any.whl", hash = "sha256:a1ed13e28a1f4057cbf4ff6cdb4fc09e85702621d3259ba17b3c230bfb2f941b"}, - {file = "sqlmodel-0.0.22.tar.gz", hash = "sha256:7d37c882a30c43464d143e35e9ecaf945d88035e20117bf5ec2834a23cbe505e"}, -] - -[[package]] -name = "stack-data" -version = "0.6.3" -summary = "Extract data from python stack frames and tracebacks for informative displays" -groups = ["tests"] -dependencies = [ - "asttokens>=2.1.0", - "executing>=1.2.0", - "pure-eval", -] -files = [ - {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, - {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, -] - -[[package]] -name = "starlette" -version = "0.41.2" -requires_python = ">=3.8" -summary = "The little ASGI library that shines." 
-groups = ["server"] -dependencies = [ - "anyio<5,>=3.4.0", - "typing-extensions>=3.10.0; python_version < \"3.10\"", -] -files = [ - {file = "starlette-0.41.2-py3-none-any.whl", hash = "sha256:fbc189474b4731cf30fcef52f18a8d070e3f3b46c6a04c97579e85e6ffca942d"}, - {file = "starlette-0.41.2.tar.gz", hash = "sha256:9834fd799d1a87fd346deb76158668cfa0b0d56f85caefe8268e2d97c3468b62"}, -] - -[[package]] -name = "tornado" -version = "6.4.1" -requires_python = ">=3.8" -summary = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -groups = ["tests"] -files = [ - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, - {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, - {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, - {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, - {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = 
"sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, - {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, - {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, -] - -[[package]] -name = "traitlets" -version = "5.14.3" -requires_python = ">=3.8" -summary = "Traitlets Python configuration system" -groups = ["tests"] -files = [ - {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, - {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -requires_python = ">=3.8" -summary = "Backported and Experimental Type Hints for Python 3.8+" -groups = ["default", "server", "tests"] -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "urllib3" -version = "2.2.3" -requires_python = ">=3.8" -summary = "HTTP library with thread-safe connection pooling, file post, and more." -groups = ["default"] -files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, -] - -[[package]] -name = "uvicorn" -version = "0.32.0" -requires_python = ">=3.8" -summary = "The lightning-fast ASGI server." 
-groups = ["server"] -dependencies = [ - "click>=7.0", - "h11>=0.8", - "typing-extensions>=4.0; python_version < \"3.11\"", -] -files = [ - {file = "uvicorn-0.32.0-py3-none-any.whl", hash = "sha256:60b8f3a5ac027dcd31448f411ced12b5ef452c646f76f02f8cc3f25d8d26fd82"}, - {file = "uvicorn-0.32.0.tar.gz", hash = "sha256:f78b36b143c16f54ccdb8190d0a26b5f1901fe5a3c777e1ab29f26391af8551e"}, -] - -[[package]] -name = "uvicorn" -version = "0.32.0" -extras = ["standard"] -requires_python = ">=3.8" -summary = "The lightning-fast ASGI server." -groups = ["server"] -dependencies = [ - "colorama>=0.4; sys_platform == \"win32\"", - "httptools>=0.5.0", - "python-dotenv>=0.13", - "pyyaml>=5.1", - "uvicorn==0.32.0", - "uvloop!=0.15.0,!=0.15.1,>=0.14.0; (sys_platform != \"cygwin\" and sys_platform != \"win32\") and platform_python_implementation != \"PyPy\"", - "watchfiles>=0.13", - "websockets>=10.4", -] -files = [ - {file = "uvicorn-0.32.0-py3-none-any.whl", hash = "sha256:60b8f3a5ac027dcd31448f411ced12b5ef452c646f76f02f8cc3f25d8d26fd82"}, - {file = "uvicorn-0.32.0.tar.gz", hash = "sha256:f78b36b143c16f54ccdb8190d0a26b5f1901fe5a3c777e1ab29f26391af8551e"}, -] - -[[package]] -name = "uvloop" -version = "0.21.0" -requires_python = ">=3.8.0" -summary = "Fast implementation of asyncio event loop on top of libuv" -groups = ["server"] -marker = "(sys_platform != \"cygwin\" and sys_platform != \"win32\") and platform_python_implementation != \"PyPy\"" -files = [ - {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, - {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, - {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}, - {file = 
"uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}, - {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}, - {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}, - {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}, - {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}, - {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}, - {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}, - {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}, - {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}, - {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"}, - {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"}, - {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"}, - {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}, - {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}, - {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}, - {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"}, - {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"}, - {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"}, - {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"}, - {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"}, - {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"}, - {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b"}, - {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2"}, - {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0"}, - {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75"}, - {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd"}, - {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff"}, - {file = "uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"}, -] - -[[package]] -name = "watchfiles" -version = "0.24.0" -requires_python = ">=3.8" -summary = "Simple, modern and high performance file watching and code reload in python." -groups = ["server"] -dependencies = [ - "anyio>=3.0.0", -] -files = [ - {file = "watchfiles-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:083dc77dbdeef09fa44bb0f4d1df571d2e12d8a8f985dccde71ac3ac9ac067a0"}, - {file = "watchfiles-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e94e98c7cb94cfa6e071d401ea3342767f28eb5a06a58fafdc0d2a4974f4f35c"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82ae557a8c037c42a6ef26c494d0631cacca040934b101d001100ed93d43f361"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:acbfa31e315a8f14fe33e3542cbcafc55703b8f5dcbb7c1eecd30f141df50db3"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b74fdffce9dfcf2dc296dec8743e5b0332d15df19ae464f0e249aa871fc1c571"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:449f43f49c8ddca87c6b3980c9284cab6bd1f5c9d9a2b00012adaaccd5e7decd"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4abf4ad269856618f82dee296ac66b0cd1d71450fc3c98532d93798e73399b7a"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f895d785eb6164678ff4bb5cc60c5996b3ee6df3edb28dcdeba86a13ea0465e"}, - {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:7ae3e208b31be8ce7f4c2c0034f33406dd24fbce3467f77223d10cd86778471c"}, - {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2efec17819b0046dde35d13fb8ac7a3ad877af41ae4640f4109d9154ed30a188"}, - {file = "watchfiles-0.24.0-cp310-none-win32.whl", hash = "sha256:6bdcfa3cd6fdbdd1a068a52820f46a815401cbc2cb187dd006cb076675e7b735"}, - {file = "watchfiles-0.24.0-cp310-none-win_amd64.whl", hash = "sha256:54ca90a9ae6597ae6dc00e7ed0a040ef723f84ec517d3e7ce13e63e4bc82fa04"}, - {file = "watchfiles-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bdcd5538e27f188dd3c804b4a8d5f52a7fc7f87e7fd6b374b8e36a4ca03db428"}, - {file = "watchfiles-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2dadf8a8014fde6addfd3c379e6ed1a981c8f0a48292d662e27cabfe4239c83c"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6509ed3f467b79d95fc62a98229f79b1a60d1b93f101e1c61d10c95a46a84f43"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8360f7314a070c30e4c976b183d1d8d1585a4a50c5cb603f431cebcbb4f66327"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:316449aefacf40147a9efaf3bd7c9bdd35aaba9ac5d708bd1eb5763c9a02bef5"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73bde715f940bea845a95247ea3e5eb17769ba1010efdc938ffcb967c634fa61"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3770e260b18e7f4e576edca4c0a639f704088602e0bc921c5c2e721e3acb8d15"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa0fd7248cf533c259e59dc593a60973a73e881162b1a2f73360547132742823"}, - {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d7a2e3b7f5703ffbd500dabdefcbc9eafeff4b9444bbdd5d83d79eedf8428fab"}, - {file = 
"watchfiles-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d831ee0a50946d24a53821819b2327d5751b0c938b12c0653ea5be7dea9c82ec"}, - {file = "watchfiles-0.24.0-cp311-none-win32.whl", hash = "sha256:49d617df841a63b4445790a254013aea2120357ccacbed00253f9c2b5dc24e2d"}, - {file = "watchfiles-0.24.0-cp311-none-win_amd64.whl", hash = "sha256:d3dcb774e3568477275cc76554b5a565024b8ba3a0322f77c246bc7111c5bb9c"}, - {file = "watchfiles-0.24.0-cp311-none-win_arm64.whl", hash = "sha256:9301c689051a4857d5b10777da23fafb8e8e921bcf3abe6448a058d27fb67633"}, - {file = "watchfiles-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7211b463695d1e995ca3feb38b69227e46dbd03947172585ecb0588f19b0d87a"}, - {file = "watchfiles-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b8693502d1967b00f2fb82fc1e744df128ba22f530e15b763c8d82baee15370"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdab9555053399318b953a1fe1f586e945bc8d635ce9d05e617fd9fe3a4687d6"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34e19e56d68b0dad5cff62273107cf5d9fbaf9d75c46277aa5d803b3ef8a9e9b"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41face41f036fee09eba33a5b53a73e9a43d5cb2c53dad8e61fa6c9f91b5a51e"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5148c2f1ea043db13ce9b0c28456e18ecc8f14f41325aa624314095b6aa2e9ea"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e4bd963a935aaf40b625c2499f3f4f6bbd0c3776f6d3bc7c853d04824ff1c9f"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c79d7719d027b7a42817c5d96461a99b6a49979c143839fc37aa5748c322f234"}, - {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:32aa53a9a63b7f01ed32e316e354e81e9da0e6267435c7243bf8ae0f10b428ef"}, - {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce72dba6a20e39a0c628258b5c308779b8697f7676c254a845715e2a1039b968"}, - {file = "watchfiles-0.24.0-cp312-none-win32.whl", hash = "sha256:d9018153cf57fc302a2a34cb7564870b859ed9a732d16b41a9b5cb2ebed2d444"}, - {file = "watchfiles-0.24.0-cp312-none-win_amd64.whl", hash = "sha256:551ec3ee2a3ac9cbcf48a4ec76e42c2ef938a7e905a35b42a1267fa4b1645896"}, - {file = "watchfiles-0.24.0-cp312-none-win_arm64.whl", hash = "sha256:b52a65e4ea43c6d149c5f8ddb0bef8d4a1e779b77591a458a893eb416624a418"}, - {file = "watchfiles-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2e3ab79a1771c530233cadfd277fcc762656d50836c77abb2e5e72b88e3a48"}, - {file = "watchfiles-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327763da824817b38ad125dcd97595f942d720d32d879f6c4ddf843e3da3fe90"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82010f8ab451dabe36054a1622870166a67cf3fce894f68895db6f74bbdc94"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d64ba08db72e5dfd5c33be1e1e687d5e4fcce09219e8aee893a4862034081d4e"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1cf1f6dd7825053f3d98f6d33f6464ebdd9ee95acd74ba2c34e183086900a827"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43e3e37c15a8b6fe00c1bce2473cfa8eb3484bbeecf3aefbf259227e487a03df"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88bcd4d0fe1d8ff43675360a72def210ebad3f3f72cabfeac08d825d2639b4ab"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:999928c6434372fde16c8f27143d3e97201160b48a614071261701615a2a156f"}, - {file = 
"watchfiles-0.24.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:30bbd525c3262fd9f4b1865cb8d88e21161366561cd7c9e1194819e0a33ea86b"}, - {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edf71b01dec9f766fb285b73930f95f730bb0943500ba0566ae234b5c1618c18"}, - {file = "watchfiles-0.24.0-cp313-none-win32.whl", hash = "sha256:f4c96283fca3ee09fb044f02156d9570d156698bc3734252175a38f0e8975f07"}, - {file = "watchfiles-0.24.0-cp313-none-win_amd64.whl", hash = "sha256:a974231b4fdd1bb7f62064a0565a6b107d27d21d9acb50c484d2cdba515b9366"}, - {file = "watchfiles-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b665caeeda58625c3946ad7308fbd88a086ee51ccb706307e5b1fa91556ac886"}, - {file = "watchfiles-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5c51749f3e4e269231510da426ce4a44beb98db2dce9097225c338f815b05d4f"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b2509f08761f29a0fdad35f7e1638b8ab1adfa2666d41b794090361fb8b855"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a60e2bf9dc6afe7f743e7c9b149d1fdd6dbf35153c78fe3a14ae1a9aee3d98b"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7d9b87c4c55e3ea8881dfcbf6d61ea6775fffed1fedffaa60bd047d3c08c430"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:78470906a6be5199524641f538bd2c56bb809cd4bf29a566a75051610bc982c3"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07cdef0c84c03375f4e24642ef8d8178e533596b229d32d2bbd69e5128ede02a"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d337193bbf3e45171c8025e291530fb7548a93c45253897cd764a6a71c937ed9"}, - {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:ec39698c45b11d9694a1b635a70946a5bad066b593af863460a8e600f0dff1ca"}, - {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e28d91ef48eab0afb939fa446d8ebe77e2f7593f5f463fd2bb2b14132f95b6e"}, - {file = "watchfiles-0.24.0-cp39-none-win32.whl", hash = "sha256:7138eff8baa883aeaa074359daabb8b6c1e73ffe69d5accdc907d62e50b1c0da"}, - {file = "watchfiles-0.24.0-cp39-none-win_amd64.whl", hash = "sha256:b3ef2c69c655db63deb96b3c3e587084612f9b1fa983df5e0c3379d41307467f"}, - {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:632676574429bee8c26be8af52af20e0c718cc7f5f67f3fb658c71928ccd4f7f"}, - {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a2a9891723a735d3e2540651184be6fd5b96880c08ffe1a98bae5017e65b544b"}, - {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7fa2bc0efef3e209a8199fd111b8969fe9db9c711acc46636686331eda7dd4"}, - {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01550ccf1d0aed6ea375ef259706af76ad009ef5b0203a3a4cce0f6024f9b68a"}, - {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:96619302d4374de5e2345b2b622dc481257a99431277662c30f606f3e22f42be"}, - {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:85d5f0c7771dcc7a26c7a27145059b6bb0ce06e4e751ed76cdf123d7039b60b5"}, - {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951088d12d339690a92cef2ec5d3cfd957692834c72ffd570ea76a6790222777"}, - {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fb58bcaa343fedc6a9e91f90195b20ccb3135447dc9e4e2570c3a39565853e"}, - {file = "watchfiles-0.24.0.tar.gz", hash = "sha256:afb72325b74fa7a428c009c1b8be4b4d7c2afedafb2982827ef2156646df2fe1"}, -] - -[[package]] -name = "wcwidth" 
-version = "0.2.13" -summary = "Measures the displayed width of unicode strings in a terminal" -groups = ["tests"] -dependencies = [ - "backports-functools-lru-cache>=1.2.1; python_version < \"3.2\"", -] -files = [ - {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, - {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, -] - -[[package]] -name = "websockets" -version = "13.1" -requires_python = ">=3.8" -summary = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" -groups = ["server"] -files = [ - {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"}, - {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"}, - {file = "websockets-13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f779498eeec470295a2b1a5d97aa1bc9814ecd25e1eb637bd9d1c73a327387f6"}, - {file = "websockets-13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676df3fe46956fbb0437d8800cd5f2b6d41143b6e7e842e60554398432cf29b"}, - {file = "websockets-13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7affedeb43a70351bb811dadf49493c9cfd1ed94c9c70095fd177e9cc1541fa"}, - {file = "websockets-13.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1971e62d2caa443e57588e1d82d15f663b29ff9dfe7446d9964a4b6f12c1e700"}, - {file = "websockets-13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5f2e75431f8dc4a47f31565a6e1355fb4f2ecaa99d6b89737527ea917066e26c"}, - {file = "websockets-13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58cf7e75dbf7e566088b07e36ea2e3e2bd5676e22216e4cad108d4df4a7402a0"}, - {file = 
"websockets-13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c90d6dec6be2c7d03378a574de87af9b1efea77d0c52a8301dd831ece938452f"}, - {file = "websockets-13.1-cp310-cp310-win32.whl", hash = "sha256:730f42125ccb14602f455155084f978bd9e8e57e89b569b4d7f0f0c17a448ffe"}, - {file = "websockets-13.1-cp310-cp310-win_amd64.whl", hash = "sha256:5993260f483d05a9737073be197371940c01b257cc45ae3f1d5d7adb371b266a"}, - {file = "websockets-13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:61fc0dfcda609cda0fc9fe7977694c0c59cf9d749fbb17f4e9483929e3c48a19"}, - {file = "websockets-13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ceec59f59d092c5007e815def4ebb80c2de330e9588e101cf8bd94c143ec78a5"}, - {file = "websockets-13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1dca61c6db1166c48b95198c0b7d9c990b30c756fc2923cc66f68d17dc558fd"}, - {file = "websockets-13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:308e20f22c2c77f3f39caca508e765f8725020b84aa963474e18c59accbf4c02"}, - {file = "websockets-13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d516c325e6540e8a57b94abefc3459d7dab8ce52ac75c96cad5549e187e3a7"}, - {file = "websockets-13.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c6e35319b46b99e168eb98472d6c7d8634ee37750d7693656dc766395df096"}, - {file = "websockets-13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f9fee94ebafbc3117c30be1844ed01a3b177bb6e39088bc6b2fa1dc15572084"}, - {file = "websockets-13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7c1e90228c2f5cdde263253fa5db63e6653f1c00e7ec64108065a0b9713fa1b3"}, - {file = "websockets-13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6548f29b0e401eea2b967b2fdc1c7c7b5ebb3eeb470ed23a54cd45ef078a0db9"}, - {file = "websockets-13.1-cp311-cp311-win32.whl", hash = 
"sha256:c11d4d16e133f6df8916cc5b7e3e96ee4c44c936717d684a94f48f82edb7c92f"}, - {file = "websockets-13.1-cp311-cp311-win_amd64.whl", hash = "sha256:d04f13a1d75cb2b8382bdc16ae6fa58c97337253826dfe136195b7f89f661557"}, - {file = "websockets-13.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc"}, - {file = "websockets-13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49"}, - {file = "websockets-13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd"}, - {file = "websockets-13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0"}, - {file = "websockets-13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6"}, - {file = "websockets-13.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9"}, - {file = "websockets-13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68"}, - {file = "websockets-13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14"}, - {file = "websockets-13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf"}, - {file = "websockets-13.1-cp312-cp312-win32.whl", hash = "sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c"}, - {file = "websockets-13.1-cp312-cp312-win_amd64.whl", hash = "sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3"}, - {file = 
"websockets-13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6"}, - {file = "websockets-13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708"}, - {file = "websockets-13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418"}, - {file = "websockets-13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a"}, - {file = "websockets-13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f"}, - {file = "websockets-13.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5"}, - {file = "websockets-13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135"}, - {file = "websockets-13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2"}, - {file = "websockets-13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6"}, - {file = "websockets-13.1-cp313-cp313-win32.whl", hash = "sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d"}, - {file = "websockets-13.1-cp313-cp313-win_amd64.whl", hash = "sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2"}, - {file = "websockets-13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9b37c184f8b976f0c0a231a5f3d6efe10807d41ccbe4488df8c74174805eea7d"}, - {file = "websockets-13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:163e7277e1a0bd9fb3c8842a71661ad19c6aa7bb3d6678dc7f89b17fbcc4aeb7"}, - {file = "websockets-13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b889dbd1342820cc210ba44307cf75ae5f2f96226c0038094455a96e64fb07a"}, - {file = "websockets-13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:586a356928692c1fed0eca68b4d1c2cbbd1ca2acf2ac7e7ebd3b9052582deefa"}, - {file = "websockets-13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bd6abf1e070a6b72bfeb71049d6ad286852e285f146682bf30d0296f5fbadfa"}, - {file = "websockets-13.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2aad13a200e5934f5a6767492fb07151e1de1d6079c003ab31e1823733ae79"}, - {file = "websockets-13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:df01aea34b6e9e33572c35cd16bae5a47785e7d5c8cb2b54b2acdb9678315a17"}, - {file = "websockets-13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e54affdeb21026329fb0744ad187cf812f7d3c2aa702a5edb562b325191fcab6"}, - {file = "websockets-13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ef8aa8bdbac47f4968a5d66462a2a0935d044bf35c0e5a8af152d58516dbeb5"}, - {file = "websockets-13.1-cp39-cp39-win32.whl", hash = "sha256:deeb929efe52bed518f6eb2ddc00cc496366a14c726005726ad62c2dd9017a3c"}, - {file = "websockets-13.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c65ffa900e7cc958cd088b9a9157a8141c991f8c53d11087e6fb7277a03f81d"}, - {file = "websockets-13.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5dd6da9bec02735931fccec99d97c29f47cc61f644264eb995ad6c0c27667238"}, - {file = "websockets-13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2510c09d8e8df777177ee3d40cd35450dc169a81e747455cc4197e63f7e7bfe5"}, - {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c3cf67185543730888b20682fb186fc8d0fa6f07ccc3ef4390831ab4b388d9"}, - 
{file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcc03c8b72267e97b49149e4863d57c2d77f13fae12066622dc78fe322490fe6"}, - {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004280a140f220c812e65f36944a9ca92d766b6cc4560be652a0a3883a79ed8a"}, - {file = "websockets-13.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2620453c075abeb0daa949a292e19f56de518988e079c36478bacf9546ced23"}, - {file = "websockets-13.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:25c35bf84bf7c7369d247f0b8cfa157f989862c49104c5cf85cb5436a641d93e"}, - {file = "websockets-13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:83f91d8a9bb404b8c2c41a707ac7f7f75b9442a0a876df295de27251a856ad09"}, - {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a43cfdcddd07f4ca2b1afb459824dd3c6d53a51410636a2c7fc97b9a8cf4842"}, - {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a2ef1381632a2f0cb4efeff34efa97901c9fbc118e01951ad7cfc10601a9bb"}, - {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bf774c754c35dbb487360b12c5727adab887f1622b8aed5755880a21c4a20"}, - {file = "websockets-13.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:95858ca14a9f6fa8413d29e0a585b31b278388aa775b8a81fa24830123874678"}, - {file = "websockets-13.1-py3-none-any.whl", hash = "sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f"}, - {file = "websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878"}, -] - -[[package]] -name = "zipp" -version = "3.21.0" -requires_python = ">=3.9" -summary = "Backport of pathlib-compatible object 
wrapper for zip files" -groups = ["tests"] -marker = "python_version < \"3.10\"" -files = [ - {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, - {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, -] +# This file is @generated by PDM. +# It is not intended for manual editing. + +[metadata] +groups = ["default", "server", "tests"] +strategy = ["inherit_metadata"] +lock_version = "4.5.0" +content_hash = "sha256:48620a5f03867071175c529837f308437d8b523a041a4834e63223a3b8bed639" + +[[metadata.targets]] +requires_python = ">=3.9.1" + +[[package]] +name = "aenum" +version = "3.1.15" +summary = "Advanced Enumerations (compatible with Python's stdlib Enum), NamedTuples, and NamedConstants" +groups = ["default"] +files = [ + {file = "aenum-3.1.15-py3-none-any.whl", hash = "sha256:e0dfaeea4c2bd362144b87377e2c61d91958c5ed0b4daf89cb6f45ae23af6288"}, + {file = "aenum-3.1.15.tar.gz", hash = "sha256:8cbd76cd18c4f870ff39b24284d3ea028fbe8731a58df3aa581e434c575b9559"}, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +requires_python = ">=3.8" +summary = "Reusable constraint types to use with typing.Annotated" +groups = ["default", "server"] +dependencies = [ + "typing-extensions>=4.0.0; python_version < \"3.9\"", +] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.6.2.post1" +requires_python = ">=3.9" +summary = "High level compatibility layer for multiple asynchronous event loop implementations" +groups = ["server"] +dependencies = [ + "exceptiongroup>=1.0.2; python_version < \"3.11\"", + "idna>=2.8", + "sniffio>=1.1", + "typing-extensions>=4.1; python_version < 
\"3.11\"", +] +files = [ + {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, + {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, +] + +[[package]] +name = "appnope" +version = "0.1.4" +requires_python = ">=3.6" +summary = "Disable App Nap on macOS >= 10.9" +groups = ["tests"] +marker = "platform_system == \"Darwin\"" +files = [ + {file = "appnope-0.1.4-py2.py3-none-any.whl", hash = "sha256:502575ee11cd7a28c0205f379b525beefebab9d161b7c964670864014ed7213c"}, + {file = "appnope-0.1.4.tar.gz", hash = "sha256:1de3860566df9caf38f01f86f65e0e13e379af54f9e4bee1e66b48f2efffd1ee"}, +] + +[[package]] +name = "asttokens" +version = "2.4.1" +summary = "Annotate AST trees with source code positions" +groups = ["tests"] +dependencies = [ + "six>=1.12.0", + "typing; python_version < \"3.5\"", +] +files = [ + {file = "asttokens-2.4.1-py2.py3-none-any.whl", hash = "sha256:051ed49c3dcae8913ea7cd08e46a606dba30b79993209636c4875bc1d637bc24"}, + {file = "asttokens-2.4.1.tar.gz", hash = "sha256:b03869718ba9a6eb027e134bfdf69f38a236d681c83c160d510768af11254ba0"}, +] + +[[package]] +name = "certifi" +version = "2024.8.30" +requires_python = ">=3.6" +summary = "Python package for providing Mozilla's CA Bundle." +groups = ["default"] +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "cffi" +version = "1.17.1" +requires_python = ">=3.8" +summary = "Foreign Function Interface for Python calling C code." 
+groups = ["tests"] +marker = "implementation_name == \"pypy\"" +dependencies = [ + "pycparser", +] +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = 
"cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = 
"cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = 
"cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.0" +requires_python = ">=3.7.0" +summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+groups = ["default"] +files = [ + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = 
"charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = 
"charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", 
hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +] + +[[package]] +name = "click" +version = "8.1.7" +requires_python = ">=3.7" +summary = "Composable command line interface toolkit" +groups = ["server"] +dependencies = [ + "colorama; platform_system == \"Windows\"", + "importlib-metadata; python_version < \"3.8\"", +] +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +summary = "Cross-platform colored terminal text." 
+groups = ["server", "tests"] +marker = "sys_platform == \"win32\" or platform_system == \"Windows\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "comm" +version = "0.2.2" +requires_python = ">=3.8" +summary = "Jupyter Python Comm implementation, for usage in ipykernel, xeus-python etc." +groups = ["tests"] +dependencies = [ + "traitlets>=4", +] +files = [ + {file = "comm-0.2.2-py3-none-any.whl", hash = "sha256:e6fb86cb70ff661ee8c9c14e7d36d6de3b4066f1441be4063df9c5009f0a64d3"}, + {file = "comm-0.2.2.tar.gz", hash = "sha256:3fd7a84065306e07bea1773df6eb8282de51ba82f77c72f9c85716ab11fe980e"}, +] + +[[package]] +name = "debugpy" +version = "1.8.8" +requires_python = ">=3.8" +summary = "An implementation of the Debug Adapter Protocol for Python" +groups = ["tests"] +files = [ + {file = "debugpy-1.8.8-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:e59b1607c51b71545cb3496876544f7186a7a27c00b436a62f285603cc68d1c6"}, + {file = "debugpy-1.8.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6531d952b565b7cb2fbd1ef5df3d333cf160b44f37547a4e7cf73666aca5d8d"}, + {file = "debugpy-1.8.8-cp310-cp310-win32.whl", hash = "sha256:b01f4a5e5c5fb1d34f4ccba99a20ed01eabc45a4684f4948b5db17a319dfb23f"}, + {file = "debugpy-1.8.8-cp310-cp310-win_amd64.whl", hash = "sha256:535f4fb1c024ddca5913bb0eb17880c8f24ba28aa2c225059db145ee557035e9"}, + {file = "debugpy-1.8.8-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:c399023146e40ae373753a58d1be0a98bf6397fadc737b97ad612886b53df318"}, + {file = "debugpy-1.8.8-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:09cc7b162586ea2171eea055985da2702b0723f6f907a423c9b2da5996ad67ba"}, + {file = "debugpy-1.8.8-cp311-cp311-win32.whl", hash = "sha256:eea8821d998ebeb02f0625dd0d76839ddde8cbf8152ebbe289dd7acf2cdc6b98"}, + {file = "debugpy-1.8.8-cp311-cp311-win_amd64.whl", hash = "sha256:d4483836da2a533f4b1454dffc9f668096ac0433de855f0c22cdce8c9f7e10c4"}, + {file = "debugpy-1.8.8-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:0cc94186340be87b9ac5a707184ec8f36547fb66636d1029ff4f1cc020e53996"}, + {file = "debugpy-1.8.8-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64674e95916e53c2e9540a056e5f489e0ad4872645399d778f7c598eacb7b7f9"}, + {file = "debugpy-1.8.8-cp312-cp312-win32.whl", hash = "sha256:5c6e885dbf12015aed73770f29dec7023cb310d0dc2ba8bfbeb5c8e43f80edc9"}, + {file = "debugpy-1.8.8-cp312-cp312-win_amd64.whl", hash = "sha256:19ffbd84e757a6ca0113574d1bf5a2298b3947320a3e9d7d8dc3377f02d9f864"}, + {file = "debugpy-1.8.8-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:705cd123a773d184860ed8dae99becd879dfec361098edbefb5fc0d3683eb804"}, + {file = "debugpy-1.8.8-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:890fd16803f50aa9cb1a9b9b25b5ec321656dd6b78157c74283de241993d086f"}, + {file = "debugpy-1.8.8-cp313-cp313-win32.whl", hash = "sha256:90244598214bbe704aa47556ec591d2f9869ff9e042e301a2859c57106649add"}, + {file = "debugpy-1.8.8-cp313-cp313-win_amd64.whl", hash = "sha256:4b93e4832fd4a759a0c465c967214ed0c8a6e8914bced63a28ddb0dd8c5f078b"}, + {file = "debugpy-1.8.8-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:53709d4ec586b525724819dc6af1a7703502f7e06f34ded7157f7b1f963bb854"}, + {file = "debugpy-1.8.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a9c013077a3a0000e83d97cf9cc9328d2b0bbb31f56b0e99ea3662d29d7a6a2"}, + {file = "debugpy-1.8.8-cp39-cp39-win32.whl", hash = 
"sha256:ffe94dd5e9a6739a75f0b85316dc185560db3e97afa6b215628d1b6a17561cb2"}, + {file = "debugpy-1.8.8-cp39-cp39-win_amd64.whl", hash = "sha256:5c0e5a38c7f9b481bf31277d2f74d2109292179081f11108e668195ef926c0f9"}, + {file = "debugpy-1.8.8-py2.py3-none-any.whl", hash = "sha256:ec684553aba5b4066d4de510859922419febc710df7bba04fe9e7ef3de15d34f"}, + {file = "debugpy-1.8.8.zip", hash = "sha256:e6355385db85cbd666be703a96ab7351bc9e6c61d694893206f8001e22aee091"}, +] + +[[package]] +name = "decorator" +version = "5.1.1" +requires_python = ">=3.5" +summary = "Decorators for Humans" +groups = ["tests"] +files = [ + {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, + {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +requires_python = ">=3.7" +summary = "Backport of PEP 654 (exception groups)" +groups = ["server", "tests"] +marker = "python_version < \"3.11\"" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[[package]] +name = "executing" +version = "2.1.0" +requires_python = ">=3.8" +summary = "Get the currently executing AST node of a frame, and other information" +groups = ["tests"] +files = [ + {file = "executing-2.1.0-py2.py3-none-any.whl", hash = "sha256:8d63781349375b5ebccc3142f4b30350c0cd9c79f921cde38be2be4637e98eaf"}, + {file = "executing-2.1.0.tar.gz", hash = "sha256:8ea27ddd260da8150fa5a708269c4a10e76161e2496ec3e587da9e3c0fe4b9ab"}, +] + +[[package]] +name = "fastapi" +version = "0.115.4" +requires_python = ">=3.8" +summary = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +groups = ["server"] 
+dependencies = [ + "pydantic!=1.8,!=1.8.1,!=2.0.0,!=2.0.1,!=2.1.0,<3.0.0,>=1.7.4", + "starlette<0.42.0,>=0.40.0", + "typing-extensions>=4.8.0", +] +files = [ + {file = "fastapi-0.115.4-py3-none-any.whl", hash = "sha256:0b504a063ffb3cf96a5e27dc1bc32c80ca743a2528574f9cdc77daa2d31b4742"}, + {file = "fastapi-0.115.4.tar.gz", hash = "sha256:db653475586b091cb8b2fec2ac54a680ac6a158e07406e1abae31679e8826349"}, +] + +[[package]] +name = "greenlet" +version = "3.1.1" +requires_python = ">=3.7" +summary = "Lightweight in-process concurrent programming" +groups = ["default"] +marker = "(platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"" +files = [ + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = 
"greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = 
"greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = 
"sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, +] + +[[package]] +name = "h11" +version = "0.14.0" +requires_python = ">=3.7" +summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +groups = ["server"] +dependencies = [ + "typing-extensions; python_version < \"3.8\"", +] +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = 
"h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httptools" +version = "0.6.4" +requires_python = ">=3.8.0" +summary = "A collection of framework independent HTTP protocol utils." +groups = ["server"] +files = [ + {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}, + {file = "httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}, + {file = 
"httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}, + {file = "httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}, + {file = "httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}, + {file = "httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd"}, + {file = "httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6"}, 
+ {file = "httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}, +] + +[[package]] +name = "idna" +version = "3.10" +requires_python = ">=3.6" +summary = "Internationalized Domain Names in Applications (IDNA)" +groups = ["default", "server"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[[package]] +name = "importlib-metadata" +version = "8.5.0" +requires_python = ">=3.8" +summary = "Read metadata from Python packages" +groups = ["tests"] +marker = "python_version < \"3.10\"" +dependencies = [ + "typing-extensions>=3.6.4; python_version < \"3.8\"", + "zipp>=3.20", +] +files = [ + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, +] + +[[package]] +name = "ipykernel" +version = "6.29.5" +requires_python = ">=3.8" +summary = "IPython Kernel for Jupyter" +groups = ["tests"] +dependencies = [ + "appnope; platform_system == \"Darwin\"", + "comm>=0.1.1", + "debugpy>=1.6.5", + "ipython>=7.23.1", + "jupyter-client>=6.1.12", + "jupyter-core!=5.0.*,>=4.12", + "matplotlib-inline>=0.1", + "nest-asyncio", + "packaging", + "psutil", + "pyzmq>=24", + "tornado>=6.1", + "traitlets>=5.4.0", +] +files = [ + {file = "ipykernel-6.29.5-py3-none-any.whl", hash = "sha256:afdb66ba5aa354b09b91379bac28ae4afebbb30e8b39510c9690afb7a10421b5"}, + {file = "ipykernel-6.29.5.tar.gz", hash = "sha256:f093a22c4a40f8828f8e330a9c297cb93dcab13bd9678ded6de8e5cf81c56215"}, +] + +[[package]] +name = "ipython" +version = "8.18.1" +requires_python = ">=3.9" +summary = "IPython: Productive Interactive Computing" +groups = ["tests"] 
+dependencies = [ + "colorama; sys_platform == \"win32\"", + "decorator", + "exceptiongroup; python_version < \"3.11\"", + "jedi>=0.16", + "matplotlib-inline", + "pexpect>4.3; sys_platform != \"win32\"", + "prompt-toolkit<3.1.0,>=3.0.41", + "pygments>=2.4.0", + "stack-data", + "traitlets>=5", + "typing-extensions; python_version < \"3.10\"", +] +files = [ + {file = "ipython-8.18.1-py3-none-any.whl", hash = "sha256:e8267419d72d81955ec1177f8a29aaa90ac80ad647499201119e2f05e99aa397"}, + {file = "ipython-8.18.1.tar.gz", hash = "sha256:ca6f079bb33457c66e233e4580ebfc4128855b4cf6370dddd73842a9563e8a27"}, +] + +[[package]] +name = "jedi" +version = "0.19.2" +requires_python = ">=3.6" +summary = "An autocompletion tool for Python that can be used for text editors." +groups = ["tests"] +dependencies = [ + "parso<0.9.0,>=0.8.4", +] +files = [ + {file = "jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9"}, + {file = "jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0"}, +] + +[[package]] +name = "jupyter-client" +version = "8.6.3" +requires_python = ">=3.8" +summary = "Jupyter protocol implementation and client libraries" +groups = ["tests"] +dependencies = [ + "importlib-metadata>=4.8.3; python_version < \"3.10\"", + "jupyter-core!=5.0.*,>=4.12", + "python-dateutil>=2.8.2", + "pyzmq>=23.0", + "tornado>=6.2", + "traitlets>=5.3", +] +files = [ + {file = "jupyter_client-8.6.3-py3-none-any.whl", hash = "sha256:e8a19cc986cc45905ac3362915f410f3af85424b4c0905e94fa5f2cb08e8f23f"}, + {file = "jupyter_client-8.6.3.tar.gz", hash = "sha256:35b3a0947c4a6e9d589eb97d7d4cd5e90f910ee73101611f01283732bd6d9419"}, +] + +[[package]] +name = "jupyter-core" +version = "5.7.2" +requires_python = ">=3.8" +summary = "Jupyter core package. A base package on which Jupyter projects rely." 
+groups = ["tests"] +dependencies = [ + "platformdirs>=2.5", + "pywin32>=300; sys_platform == \"win32\" and platform_python_implementation != \"PyPy\"", + "traitlets>=5.3", +] +files = [ + {file = "jupyter_core-5.7.2-py3-none-any.whl", hash = "sha256:4f7315d2f6b4bcf2e3e7cb6e46772eba760ae459cd1f59d29eb57b0a01bd7409"}, + {file = "jupyter_core-5.7.2.tar.gz", hash = "sha256:aa5f8d32bbf6b431ac830496da7392035d6f61b4f54872f15c4bd2a9c3f536d9"}, +] + +[[package]] +name = "matplotlib-inline" +version = "0.1.7" +requires_python = ">=3.8" +summary = "Inline Matplotlib backend for Jupyter" +groups = ["tests"] +dependencies = [ + "traitlets", +] +files = [ + {file = "matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca"}, + {file = "matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90"}, +] + +[[package]] +name = "nest-asyncio" +version = "1.6.0" +requires_python = ">=3.5" +summary = "Patch asyncio to allow nested event loops" +groups = ["tests"] +files = [ + {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, + {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, +] + +[[package]] +name = "packaging" +version = "24.2" +requires_python = ">=3.8" +summary = "Core utilities for Python packages" +groups = ["tests"] +files = [ + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] + +[[package]] +name = "parso" +version = "0.8.4" +requires_python = ">=3.6" +summary = "A Python Parser" +groups = ["tests"] +files = [ + {file = "parso-0.8.4-py2.py3-none-any.whl", hash = 
"sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18"}, + {file = "parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d"}, +] + +[[package]] +name = "pexpect" +version = "4.9.0" +summary = "Pexpect allows easy control of interactive console applications." +groups = ["tests"] +marker = "sys_platform != \"win32\"" +dependencies = [ + "ptyprocess>=0.5", +] +files = [ + {file = "pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523"}, + {file = "pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +requires_python = ">=3.8" +summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +groups = ["tests"] +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.48" +requires_python = ">=3.7.0" +summary = "Library for building powerful interactive command lines in Python" +groups = ["tests"] +dependencies = [ + "wcwidth", +] +files = [ + {file = "prompt_toolkit-3.0.48-py3-none-any.whl", hash = "sha256:f49a827f90062e411f1ce1f854f2aedb3c23353244f8108b89283587397ac10e"}, + {file = "prompt_toolkit-3.0.48.tar.gz", hash = "sha256:d6623ab0477a80df74e646bdbc93621143f5caf104206aa29294d53de1a03d90"}, +] + +[[package]] +name = "psutil" +version = "6.1.0" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +summary = "Cross-platform lib for process and system monitoring in Python." 
+groups = ["tests"] +files = [ + {file = "psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a"}, + {file = "psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e"}, + {file = "psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be"}, + {file = "psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a"}, +] + +[[package]] +name = "ptyprocess" +version = "0.7.0" +summary = "Run a subprocess in a pseudo terminal" +groups = ["tests"] +marker = "sys_platform != \"win32\"" +files = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] + +[[package]] +name = "pure-eval" +version = "0.2.3" +summary = "Safely evaluate AST nodes without side effects" +groups = ["tests"] +files = [ + {file = "pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0"}, + {file = 
"pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42"}, +] + +[[package]] +name = "pycparser" +version = "2.22" +requires_python = ">=3.8" +summary = "C parser in Python" +groups = ["tests"] +marker = "implementation_name == \"pypy\"" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "2.9.2" +requires_python = ">=3.8" +summary = "Data validation using Python type hints" +groups = ["default", "server"] +dependencies = [ + "annotated-types>=0.6.0", + "pydantic-core==2.23.4", + "typing-extensions>=4.12.2; python_version >= \"3.13\"", + "typing-extensions>=4.6.1; python_version < \"3.13\"", +] +files = [ + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, +] + +[[package]] +name = "pydantic-core" +version = "2.23.4" +requires_python = ">=3.8" +summary = "Core functionality for Pydantic validation and serialization" +groups = ["default", "server"] +dependencies = [ + "typing-extensions!=4.7.0,>=4.6.0", +] +files = [ + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = 
"pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = 
"pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = 
"pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = 
"pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + {file = 
"pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = 
"pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file 
= "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, +] + +[[package]] +name = "pygments" +version = "2.18.0" +requires_python = ">=3.8" +summary = "Pygments is a syntax highlighting package written in Python." +groups = ["tests"] +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +summary = "Extensions to the standard Python datetime module" +groups = ["tests"] +dependencies = [ + "six>=1.5", +] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[[package]] +name = "python-dotenv" +version = "1.0.1" +requires_python = ">=3.8" +summary = "Read key-value pairs from a .env file and set them as environment variables" +groups = ["default", "server"] +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[[package]] +name = "python-multipart" +version = "0.0.17" +requires_python = ">=3.8" +summary = "A streaming multipart parser for Python" +groups = ["server"] +files = [ + {file = "python_multipart-0.0.17-py3-none-any.whl", hash = 
"sha256:15dc4f487e0a9476cc1201261188ee0940165cffc94429b6fc565c4d3045cb5d"}, + {file = "python_multipart-0.0.17.tar.gz", hash = "sha256:41330d831cae6e2f22902704ead2826ea038d0419530eadff3ea80175aec5538"}, +] + +[[package]] +name = "python-ulid" +version = "3.0.0" +requires_python = ">=3.9" +summary = "Universally unique lexicographically sortable identifier" +groups = ["default"] +files = [ + {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, + {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, +] + +[[package]] +name = "python-ulid" +version = "3.0.0" +extras = ["pydantic"] +requires_python = ">=3.9" +summary = "Universally unique lexicographically sortable identifier" +groups = ["default"] +dependencies = [ + "pydantic>=2.0", + "python-ulid==3.0.0", +] +files = [ + {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, + {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, +] + +[[package]] +name = "pywin32" +version = "308" +summary = "Python for Window Extensions" +groups = ["tests"] +marker = "sys_platform == \"win32\" and platform_python_implementation != \"PyPy\"" +files = [ + {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, + {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, + {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, + {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, + {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = 
"sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, + {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, + {file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, + {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, + {file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, + {file = "pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, + {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, + {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, + {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, + {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +requires_python = ">=3.8" +summary = "YAML parser and emitter for Python" +groups = ["default", "server"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = 
"sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "pyzmq" +version = "26.2.0" +requires_python = ">=3.7" +summary = "Python bindings for 0MQ" +groups = ["tests"] +dependencies = [ + "cffi; implementation_name == \"pypy\"", +] +files = [ + {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:ddf33d97d2f52d89f6e6e7ae66ee35a4d9ca6f36eda89c24591b0c40205a3629"}, + {file = "pyzmq-26.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dacd995031a01d16eec825bf30802fceb2c3791ef24bcce48fa98ce40918c27b"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89289a5ee32ef6c439086184529ae060c741334b8970a6855ec0b6ad3ff28764"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5506f06d7dc6ecf1efacb4a013b1f05071bb24b76350832c96449f4a2d95091c"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ea039387c10202ce304af74def5021e9adc6297067f3441d348d2b633e8166a"}, + {file = "pyzmq-26.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a2224fa4a4c2ee872886ed00a571f5e967c85e078e8e8c2530a2fb01b3309b88"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:28ad5233e9c3b52d76196c696e362508959741e1a005fb8fa03b51aea156088f"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1c17211bc037c7d88e85ed8b7d8f7e52db6dc8eca5590d162717c654550f7282"}, + {file = "pyzmq-26.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:b8f86dd868d41bea9a5f873ee13bf5551c94cf6bc51baebc6f85075971fe6eea"}, + {file = "pyzmq-26.2.0-cp310-cp310-win32.whl", hash = "sha256:46a446c212e58456b23af260f3d9fb785054f3e3653dbf7279d8f2b5546b21c2"}, + {file = "pyzmq-26.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:49d34ab71db5a9c292a7644ce74190b1dd5a3475612eefb1f8be1d6961441971"}, + {file = "pyzmq-26.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:bfa832bfa540e5b5c27dcf5de5d82ebc431b82c453a43d141afb1e5d2de025fa"}, + {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:8f7e66c7113c684c2b3f1c83cdd3376103ee0ce4c49ff80a648643e57fb22218"}, + {file = "pyzmq-26.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3a495b30fc91db2db25120df5847d9833af237546fd59170701acd816ccc01c4"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77eb0968da535cba0470a5165468b2cac7772cfb569977cff92e240f57e31bef"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ace4f71f1900a548f48407fc9be59c6ba9d9aaf658c2eea6cf2779e72f9f317"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92a78853d7280bffb93df0a4a6a2498cba10ee793cc8076ef797ef2f74d107cf"}, + {file = "pyzmq-26.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:689c5d781014956a4a6de61d74ba97b23547e431e9e7d64f27d4922ba96e9d6e"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aca98bc423eb7d153214b2df397c6421ba6373d3397b26c057af3c904452e37"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1f3496d76b89d9429a656293744ceca4d2ac2a10ae59b84c1da9b5165f429ad3"}, + {file = "pyzmq-26.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5c2b3bfd4b9689919db068ac6c9911f3fcb231c39f7dd30e3138be94896d18e6"}, + {file = "pyzmq-26.2.0-cp311-cp311-win32.whl", hash = "sha256:eac5174677da084abf378739dbf4ad245661635f1600edd1221f150b165343f4"}, + {file = 
"pyzmq-26.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a509df7d0a83a4b178d0f937ef14286659225ef4e8812e05580776c70e155d5"}, + {file = "pyzmq-26.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:c0e6091b157d48cbe37bd67233318dbb53e1e6327d6fc3bb284afd585d141003"}, + {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:ded0fc7d90fe93ae0b18059930086c51e640cdd3baebdc783a695c77f123dcd9"}, + {file = "pyzmq-26.2.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17bf5a931c7f6618023cdacc7081f3f266aecb68ca692adac015c383a134ca52"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55cf66647e49d4621a7e20c8d13511ef1fe1efbbccf670811864452487007e08"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4661c88db4a9e0f958c8abc2b97472e23061f0bc737f6f6179d7a27024e1faa5"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea7f69de383cb47522c9c208aec6dd17697db7875a4674c4af3f8cfdac0bdeae"}, + {file = "pyzmq-26.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:7f98f6dfa8b8ccaf39163ce872bddacca38f6a67289116c8937a02e30bbe9711"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e3e0210287329272539eea617830a6a28161fbbd8a3271bf4150ae3e58c5d0e6"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6b274e0762c33c7471f1a7471d1a2085b1a35eba5cdc48d2ae319f28b6fc4de3"}, + {file = "pyzmq-26.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:29c6a4635eef69d68a00321e12a7d2559fe2dfccfa8efae3ffb8e91cd0b36a8b"}, + {file = "pyzmq-26.2.0-cp312-cp312-win32.whl", hash = "sha256:989d842dc06dc59feea09e58c74ca3e1678c812a4a8a2a419046d711031f69c7"}, + {file = "pyzmq-26.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:2a50625acdc7801bc6f74698c5c583a491c61d73c6b7ea4dee3901bb99adb27a"}, + {file = "pyzmq-26.2.0-cp312-cp312-win_arm64.whl", hash = 
"sha256:4d29ab8592b6ad12ebbf92ac2ed2bedcfd1cec192d8e559e2e099f648570e19b"}, + {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9dd8cd1aeb00775f527ec60022004d030ddc51d783d056e3e23e74e623e33726"}, + {file = "pyzmq-26.2.0-cp313-cp313-macosx_10_15_universal2.whl", hash = "sha256:28c812d9757fe8acecc910c9ac9dafd2ce968c00f9e619db09e9f8f54c3a68a3"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d80b1dd99c1942f74ed608ddb38b181b87476c6a966a88a950c7dee118fdf50"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c997098cc65e3208eca09303630e84d42718620e83b733d0fd69543a9cab9cb"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad1bc8d1b7a18497dda9600b12dc193c577beb391beae5cd2349184db40f187"}, + {file = "pyzmq-26.2.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:bea2acdd8ea4275e1278350ced63da0b166421928276c7c8e3f9729d7402a57b"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:23f4aad749d13698f3f7b64aad34f5fc02d6f20f05999eebc96b89b01262fb18"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:a4f96f0d88accc3dbe4a9025f785ba830f968e21e3e2c6321ccdfc9aef755115"}, + {file = "pyzmq-26.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ced65e5a985398827cc9276b93ef6dfabe0273c23de8c7931339d7e141c2818e"}, + {file = "pyzmq-26.2.0-cp313-cp313-win32.whl", hash = "sha256:31507f7b47cc1ead1f6e86927f8ebb196a0bab043f6345ce070f412a59bf87b5"}, + {file = "pyzmq-26.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:70fc7fcf0410d16ebdda9b26cbd8bf8d803d220a7f3522e060a69a9c87bf7bad"}, + {file = "pyzmq-26.2.0-cp313-cp313-win_arm64.whl", hash = "sha256:c3789bd5768ab5618ebf09cef6ec2b35fed88709b104351748a63045f0ff9797"}, + {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:034da5fc55d9f8da09015d368f519478a52675e558c989bfcb5cf6d4e16a7d2a"}, + {file = "pyzmq-26.2.0-cp313-cp313t-macosx_10_15_universal2.whl", hash = "sha256:c92d73464b886931308ccc45b2744e5968cbaade0b1d6aeb40d8ab537765f5bc"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:794a4562dcb374f7dbbfb3f51d28fb40123b5a2abadee7b4091f93054909add5"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aee22939bb6075e7afededabad1a56a905da0b3c4e3e0c45e75810ebe3a52672"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ae90ff9dad33a1cfe947d2c40cb9cb5e600d759ac4f0fd22616ce6540f72797"}, + {file = "pyzmq-26.2.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:43a47408ac52647dfabbc66a25b05b6a61700b5165807e3fbd40063fcaf46386"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:25bf2374a2a8433633c65ccb9553350d5e17e60c8eb4de4d92cc6bd60f01d306"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_i686.whl", hash = "sha256:007137c9ac9ad5ea21e6ad97d3489af654381324d5d3ba614c323f60dab8fae6"}, + {file = "pyzmq-26.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:470d4a4f6d48fb34e92d768b4e8a5cc3780db0d69107abf1cd7ff734b9766eb0"}, + {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:b1d464cb8d72bfc1a3adc53305a63a8e0cac6bc8c5a07e8ca190ab8d3faa43c2"}, + {file = "pyzmq-26.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4da04c48873a6abdd71811c5e163bd656ee1b957971db7f35140a2d573f6949c"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:d049df610ac811dcffdc147153b414147428567fbbc8be43bb8885f04db39d98"}, + {file = "pyzmq-26.2.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:05590cdbc6b902101d0e65d6a4780af14dc22914cc6ab995d99b85af45362cc9"}, + {file = 
"pyzmq-26.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c811cfcd6a9bf680236c40c6f617187515269ab2912f3d7e8c0174898e2519db"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6835dd60355593de10350394242b5757fbbd88b25287314316f266e24c61d073"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc6bee759a6bddea5db78d7dcd609397449cb2d2d6587f48f3ca613b19410cfc"}, + {file = "pyzmq-26.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c530e1eecd036ecc83c3407f77bb86feb79916d4a33d11394b8234f3bd35b940"}, + {file = "pyzmq-26.2.0-cp39-cp39-win32.whl", hash = "sha256:367b4f689786fca726ef7a6c5ba606958b145b9340a5e4808132cc65759abd44"}, + {file = "pyzmq-26.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e6fa2e3e683f34aea77de8112f6483803c96a44fd726d7358b9888ae5bb394ec"}, + {file = "pyzmq-26.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:7445be39143a8aa4faec43b076e06944b8f9d0701b669df4af200531b21e40bb"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:706e794564bec25819d21a41c31d4df2d48e1cc4b061e8d345d7fb4dd3e94072"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b435f2753621cd36e7c1762156815e21c985c72b19135dac43a7f4f31d28dd1"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:160c7e0a5eb178011e72892f99f918c04a131f36056d10d9c1afb223fc952c2d"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c4a71d5d6e7b28a47a394c0471b7e77a0661e2d651e7ae91e0cab0a587859ca"}, + {file = "pyzmq-26.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:90412f2db8c02a3864cbfc67db0e3dcdbda336acf1c469526d3e869394fe001c"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:cdeabcff45d1c219636ee2e54d852262e5c2e085d6cb476d938aee8d921356b3"}, + {file = 
"pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35cffef589bcdc587d06f9149f8d5e9e8859920a071df5a2671de2213bef592a"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18c8dc3b7468d8b4bdf60ce9d7141897da103c7a4690157b32b60acb45e333e6"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7133d0a1677aec369d67dd78520d3fa96dd7f3dcec99d66c1762870e5ea1a50a"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6a96179a24b14fa6428cbfc08641c779a53f8fcec43644030328f44034c7f1f4"}, + {file = "pyzmq-26.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4f78c88905461a9203eac9faac157a2a0dbba84a0fd09fd29315db27be40af9f"}, + {file = "pyzmq-26.2.0.tar.gz", hash = "sha256:070672c258581c8e4f640b5159297580a9974b026043bd4ab0470be9ed324f1f"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +requires_python = ">=3.8" +summary = "Python HTTP for Humans." 
+groups = ["default"] +dependencies = [ + "certifi>=2017.4.17", + "charset-normalizer<4,>=2", + "idna<4,>=2.5", + "urllib3<3,>=1.21.1", +] +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[[package]] +name = "six" +version = "1.16.0" +requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +summary = "Python 2 and 3 compatibility utilities" +groups = ["tests"] +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +requires_python = ">=3.7" +summary = "Sniff out which async library your code is running under" +groups = ["server"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.36" +requires_python = ">=3.7" +summary = "Database Abstraction Library" +groups = ["default"] +dependencies = [ + "greenlet!=0.4.17; (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"", + "importlib-metadata; python_version < \"3.8\"", + "typing-extensions>=4.6.0", +] +files = [ + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, + {file = 
"SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = 
"sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, + {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, + {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, +] + +[[package]] +name = "sqlmodel" +version = "0.0.22" +requires_python = ">=3.7" +summary = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness." +groups = ["default"] +dependencies = [ + "SQLAlchemy<2.1.0,>=2.0.14", + "pydantic<3.0.0,>=1.10.13", +] +files = [ + {file = "sqlmodel-0.0.22-py3-none-any.whl", hash = "sha256:a1ed13e28a1f4057cbf4ff6cdb4fc09e85702621d3259ba17b3c230bfb2f941b"}, + {file = "sqlmodel-0.0.22.tar.gz", hash = "sha256:7d37c882a30c43464d143e35e9ecaf945d88035e20117bf5ec2834a23cbe505e"}, +] + +[[package]] +name = "stack-data" +version = "0.6.3" +summary = "Extract data from python stack frames and tracebacks for informative displays" +groups = ["tests"] +dependencies = [ + "asttokens>=2.1.0", + "executing>=1.2.0", + "pure-eval", +] +files = [ + {file = "stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"}, + {file = "stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9"}, +] + +[[package]] +name = "starlette" +version = "0.41.2" +requires_python = ">=3.8" +summary = "The little ASGI library that shines." 
+groups = ["server"] +dependencies = [ + "anyio<5,>=3.4.0", + "typing-extensions>=3.10.0; python_version < \"3.10\"", +] +files = [ + {file = "starlette-0.41.2-py3-none-any.whl", hash = "sha256:fbc189474b4731cf30fcef52f18a8d070e3f3b46c6a04c97579e85e6ffca942d"}, + {file = "starlette-0.41.2.tar.gz", hash = "sha256:9834fd799d1a87fd346deb76158668cfa0b0d56f85caefe8268e2d97c3468b62"}, +] + +[[package]] +name = "tornado" +version = "6.4.1" +requires_python = ">=3.8" +summary = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +groups = ["tests"] +files = [ + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, + {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = 
"sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, + {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, + {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, +] + +[[package]] +name = "traitlets" +version = "5.14.3" +requires_python = ">=3.8" +summary = "Traitlets Python configuration system" +groups = ["tests"] +files = [ + {file = "traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f"}, + {file = "traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +requires_python = ">=3.8" +summary = "Backported and Experimental Type Hints for Python 3.8+" +groups = ["default", "server", "tests"] +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +requires_python = ">=3.8" +summary = "HTTP library with thread-safe connection pooling, file post, and more." +groups = ["default"] +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[[package]] +name = "uvicorn" +version = "0.32.0" +requires_python = ">=3.8" +summary = "The lightning-fast ASGI server." 
+groups = ["server"] +dependencies = [ + "click>=7.0", + "h11>=0.8", + "typing-extensions>=4.0; python_version < \"3.11\"", +] +files = [ + {file = "uvicorn-0.32.0-py3-none-any.whl", hash = "sha256:60b8f3a5ac027dcd31448f411ced12b5ef452c646f76f02f8cc3f25d8d26fd82"}, + {file = "uvicorn-0.32.0.tar.gz", hash = "sha256:f78b36b143c16f54ccdb8190d0a26b5f1901fe5a3c777e1ab29f26391af8551e"}, +] + +[[package]] +name = "uvicorn" +version = "0.32.0" +extras = ["standard"] +requires_python = ">=3.8" +summary = "The lightning-fast ASGI server." +groups = ["server"] +dependencies = [ + "colorama>=0.4; sys_platform == \"win32\"", + "httptools>=0.5.0", + "python-dotenv>=0.13", + "pyyaml>=5.1", + "uvicorn==0.32.0", + "uvloop!=0.15.0,!=0.15.1,>=0.14.0; (sys_platform != \"cygwin\" and sys_platform != \"win32\") and platform_python_implementation != \"PyPy\"", + "watchfiles>=0.13", + "websockets>=10.4", +] +files = [ + {file = "uvicorn-0.32.0-py3-none-any.whl", hash = "sha256:60b8f3a5ac027dcd31448f411ced12b5ef452c646f76f02f8cc3f25d8d26fd82"}, + {file = "uvicorn-0.32.0.tar.gz", hash = "sha256:f78b36b143c16f54ccdb8190d0a26b5f1901fe5a3c777e1ab29f26391af8551e"}, +] + +[[package]] +name = "uvloop" +version = "0.21.0" +requires_python = ">=3.8.0" +summary = "Fast implementation of asyncio event loop on top of libuv" +groups = ["server"] +marker = "(sys_platform != \"cygwin\" and sys_platform != \"win32\") and platform_python_implementation != \"PyPy\"" +files = [ + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}, + {file = 
"uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff"}, + {file = "uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"}, +] + +[[package]] +name = "watchfiles" +version = "0.24.0" +requires_python = ">=3.8" +summary = "Simple, modern and high performance file watching and code reload in python." +groups = ["server"] +dependencies = [ + "anyio>=3.0.0", +] +files = [ + {file = "watchfiles-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:083dc77dbdeef09fa44bb0f4d1df571d2e12d8a8f985dccde71ac3ac9ac067a0"}, + {file = "watchfiles-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e94e98c7cb94cfa6e071d401ea3342767f28eb5a06a58fafdc0d2a4974f4f35c"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82ae557a8c037c42a6ef26c494d0631cacca040934b101d001100ed93d43f361"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:acbfa31e315a8f14fe33e3542cbcafc55703b8f5dcbb7c1eecd30f141df50db3"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b74fdffce9dfcf2dc296dec8743e5b0332d15df19ae464f0e249aa871fc1c571"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:449f43f49c8ddca87c6b3980c9284cab6bd1f5c9d9a2b00012adaaccd5e7decd"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4abf4ad269856618f82dee296ac66b0cd1d71450fc3c98532d93798e73399b7a"}, + {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f895d785eb6164678ff4bb5cc60c5996b3ee6df3edb28dcdeba86a13ea0465e"}, + {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:7ae3e208b31be8ce7f4c2c0034f33406dd24fbce3467f77223d10cd86778471c"}, + {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2efec17819b0046dde35d13fb8ac7a3ad877af41ae4640f4109d9154ed30a188"}, + {file = "watchfiles-0.24.0-cp310-none-win32.whl", hash = "sha256:6bdcfa3cd6fdbdd1a068a52820f46a815401cbc2cb187dd006cb076675e7b735"}, + {file = "watchfiles-0.24.0-cp310-none-win_amd64.whl", hash = "sha256:54ca90a9ae6597ae6dc00e7ed0a040ef723f84ec517d3e7ce13e63e4bc82fa04"}, + {file = "watchfiles-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bdcd5538e27f188dd3c804b4a8d5f52a7fc7f87e7fd6b374b8e36a4ca03db428"}, + {file = "watchfiles-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2dadf8a8014fde6addfd3c379e6ed1a981c8f0a48292d662e27cabfe4239c83c"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6509ed3f467b79d95fc62a98229f79b1a60d1b93f101e1c61d10c95a46a84f43"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8360f7314a070c30e4c976b183d1d8d1585a4a50c5cb603f431cebcbb4f66327"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:316449aefacf40147a9efaf3bd7c9bdd35aaba9ac5d708bd1eb5763c9a02bef5"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73bde715f940bea845a95247ea3e5eb17769ba1010efdc938ffcb967c634fa61"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3770e260b18e7f4e576edca4c0a639f704088602e0bc921c5c2e721e3acb8d15"}, + {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa0fd7248cf533c259e59dc593a60973a73e881162b1a2f73360547132742823"}, + {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d7a2e3b7f5703ffbd500dabdefcbc9eafeff4b9444bbdd5d83d79eedf8428fab"}, + {file = 
"watchfiles-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d831ee0a50946d24a53821819b2327d5751b0c938b12c0653ea5be7dea9c82ec"}, + {file = "watchfiles-0.24.0-cp311-none-win32.whl", hash = "sha256:49d617df841a63b4445790a254013aea2120357ccacbed00253f9c2b5dc24e2d"}, + {file = "watchfiles-0.24.0-cp311-none-win_amd64.whl", hash = "sha256:d3dcb774e3568477275cc76554b5a565024b8ba3a0322f77c246bc7111c5bb9c"}, + {file = "watchfiles-0.24.0-cp311-none-win_arm64.whl", hash = "sha256:9301c689051a4857d5b10777da23fafb8e8e921bcf3abe6448a058d27fb67633"}, + {file = "watchfiles-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7211b463695d1e995ca3feb38b69227e46dbd03947172585ecb0588f19b0d87a"}, + {file = "watchfiles-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b8693502d1967b00f2fb82fc1e744df128ba22f530e15b763c8d82baee15370"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdab9555053399318b953a1fe1f586e945bc8d635ce9d05e617fd9fe3a4687d6"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34e19e56d68b0dad5cff62273107cf5d9fbaf9d75c46277aa5d803b3ef8a9e9b"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41face41f036fee09eba33a5b53a73e9a43d5cb2c53dad8e61fa6c9f91b5a51e"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5148c2f1ea043db13ce9b0c28456e18ecc8f14f41325aa624314095b6aa2e9ea"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e4bd963a935aaf40b625c2499f3f4f6bbd0c3776f6d3bc7c853d04824ff1c9f"}, + {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c79d7719d027b7a42817c5d96461a99b6a49979c143839fc37aa5748c322f234"}, + {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:32aa53a9a63b7f01ed32e316e354e81e9da0e6267435c7243bf8ae0f10b428ef"}, + {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce72dba6a20e39a0c628258b5c308779b8697f7676c254a845715e2a1039b968"}, + {file = "watchfiles-0.24.0-cp312-none-win32.whl", hash = "sha256:d9018153cf57fc302a2a34cb7564870b859ed9a732d16b41a9b5cb2ebed2d444"}, + {file = "watchfiles-0.24.0-cp312-none-win_amd64.whl", hash = "sha256:551ec3ee2a3ac9cbcf48a4ec76e42c2ef938a7e905a35b42a1267fa4b1645896"}, + {file = "watchfiles-0.24.0-cp312-none-win_arm64.whl", hash = "sha256:b52a65e4ea43c6d149c5f8ddb0bef8d4a1e779b77591a458a893eb416624a418"}, + {file = "watchfiles-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2e3ab79a1771c530233cadfd277fcc762656d50836c77abb2e5e72b88e3a48"}, + {file = "watchfiles-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327763da824817b38ad125dcd97595f942d720d32d879f6c4ddf843e3da3fe90"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82010f8ab451dabe36054a1622870166a67cf3fce894f68895db6f74bbdc94"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d64ba08db72e5dfd5c33be1e1e687d5e4fcce09219e8aee893a4862034081d4e"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1cf1f6dd7825053f3d98f6d33f6464ebdd9ee95acd74ba2c34e183086900a827"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43e3e37c15a8b6fe00c1bce2473cfa8eb3484bbeecf3aefbf259227e487a03df"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88bcd4d0fe1d8ff43675360a72def210ebad3f3f72cabfeac08d825d2639b4ab"}, + {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:999928c6434372fde16c8f27143d3e97201160b48a614071261701615a2a156f"}, + {file = 
"watchfiles-0.24.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:30bbd525c3262fd9f4b1865cb8d88e21161366561cd7c9e1194819e0a33ea86b"}, + {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edf71b01dec9f766fb285b73930f95f730bb0943500ba0566ae234b5c1618c18"}, + {file = "watchfiles-0.24.0-cp313-none-win32.whl", hash = "sha256:f4c96283fca3ee09fb044f02156d9570d156698bc3734252175a38f0e8975f07"}, + {file = "watchfiles-0.24.0-cp313-none-win_amd64.whl", hash = "sha256:a974231b4fdd1bb7f62064a0565a6b107d27d21d9acb50c484d2cdba515b9366"}, + {file = "watchfiles-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b665caeeda58625c3946ad7308fbd88a086ee51ccb706307e5b1fa91556ac886"}, + {file = "watchfiles-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5c51749f3e4e269231510da426ce4a44beb98db2dce9097225c338f815b05d4f"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b2509f08761f29a0fdad35f7e1638b8ab1adfa2666d41b794090361fb8b855"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a60e2bf9dc6afe7f743e7c9b149d1fdd6dbf35153c78fe3a14ae1a9aee3d98b"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f7d9b87c4c55e3ea8881dfcbf6d61ea6775fffed1fedffaa60bd047d3c08c430"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:78470906a6be5199524641f538bd2c56bb809cd4bf29a566a75051610bc982c3"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07cdef0c84c03375f4e24642ef8d8178e533596b229d32d2bbd69e5128ede02a"}, + {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d337193bbf3e45171c8025e291530fb7548a93c45253897cd764a6a71c937ed9"}, + {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:ec39698c45b11d9694a1b635a70946a5bad066b593af863460a8e600f0dff1ca"}, + {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e28d91ef48eab0afb939fa446d8ebe77e2f7593f5f463fd2bb2b14132f95b6e"}, + {file = "watchfiles-0.24.0-cp39-none-win32.whl", hash = "sha256:7138eff8baa883aeaa074359daabb8b6c1e73ffe69d5accdc907d62e50b1c0da"}, + {file = "watchfiles-0.24.0-cp39-none-win_amd64.whl", hash = "sha256:b3ef2c69c655db63deb96b3c3e587084612f9b1fa983df5e0c3379d41307467f"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:632676574429bee8c26be8af52af20e0c718cc7f5f67f3fb658c71928ccd4f7f"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a2a9891723a735d3e2540651184be6fd5b96880c08ffe1a98bae5017e65b544b"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7fa2bc0efef3e209a8199fd111b8969fe9db9c711acc46636686331eda7dd4"}, + {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01550ccf1d0aed6ea375ef259706af76ad009ef5b0203a3a4cce0f6024f9b68a"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:96619302d4374de5e2345b2b622dc481257a99431277662c30f606f3e22f42be"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:85d5f0c7771dcc7a26c7a27145059b6bb0ce06e4e751ed76cdf123d7039b60b5"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951088d12d339690a92cef2ec5d3cfd957692834c72ffd570ea76a6790222777"}, + {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fb58bcaa343fedc6a9e91f90195b20ccb3135447dc9e4e2570c3a39565853e"}, + {file = "watchfiles-0.24.0.tar.gz", hash = "sha256:afb72325b74fa7a428c009c1b8be4b4d7c2afedafb2982827ef2156646df2fe1"}, +] + +[[package]] +name = "wcwidth" 
+version = "0.2.13" +summary = "Measures the displayed width of unicode strings in a terminal" +groups = ["tests"] +dependencies = [ + "backports-functools-lru-cache>=1.2.1; python_version < \"3.2\"", +] +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "websockets" +version = "13.1" +requires_python = ">=3.8" +summary = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +groups = ["server"] +files = [ + {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"}, + {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"}, + {file = "websockets-13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f779498eeec470295a2b1a5d97aa1bc9814ecd25e1eb637bd9d1c73a327387f6"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676df3fe46956fbb0437d8800cd5f2b6d41143b6e7e842e60554398432cf29b"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7affedeb43a70351bb811dadf49493c9cfd1ed94c9c70095fd177e9cc1541fa"}, + {file = "websockets-13.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1971e62d2caa443e57588e1d82d15f663b29ff9dfe7446d9964a4b6f12c1e700"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5f2e75431f8dc4a47f31565a6e1355fb4f2ecaa99d6b89737527ea917066e26c"}, + {file = "websockets-13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58cf7e75dbf7e566088b07e36ea2e3e2bd5676e22216e4cad108d4df4a7402a0"}, + {file = 
"websockets-13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c90d6dec6be2c7d03378a574de87af9b1efea77d0c52a8301dd831ece938452f"}, + {file = "websockets-13.1-cp310-cp310-win32.whl", hash = "sha256:730f42125ccb14602f455155084f978bd9e8e57e89b569b4d7f0f0c17a448ffe"}, + {file = "websockets-13.1-cp310-cp310-win_amd64.whl", hash = "sha256:5993260f483d05a9737073be197371940c01b257cc45ae3f1d5d7adb371b266a"}, + {file = "websockets-13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:61fc0dfcda609cda0fc9fe7977694c0c59cf9d749fbb17f4e9483929e3c48a19"}, + {file = "websockets-13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ceec59f59d092c5007e815def4ebb80c2de330e9588e101cf8bd94c143ec78a5"}, + {file = "websockets-13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1dca61c6db1166c48b95198c0b7d9c990b30c756fc2923cc66f68d17dc558fd"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:308e20f22c2c77f3f39caca508e765f8725020b84aa963474e18c59accbf4c02"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d516c325e6540e8a57b94abefc3459d7dab8ce52ac75c96cad5549e187e3a7"}, + {file = "websockets-13.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c6e35319b46b99e168eb98472d6c7d8634ee37750d7693656dc766395df096"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f9fee94ebafbc3117c30be1844ed01a3b177bb6e39088bc6b2fa1dc15572084"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7c1e90228c2f5cdde263253fa5db63e6653f1c00e7ec64108065a0b9713fa1b3"}, + {file = "websockets-13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6548f29b0e401eea2b967b2fdc1c7c7b5ebb3eeb470ed23a54cd45ef078a0db9"}, + {file = "websockets-13.1-cp311-cp311-win32.whl", hash = 
"sha256:c11d4d16e133f6df8916cc5b7e3e96ee4c44c936717d684a94f48f82edb7c92f"}, + {file = "websockets-13.1-cp311-cp311-win_amd64.whl", hash = "sha256:d04f13a1d75cb2b8382bdc16ae6fa58c97337253826dfe136195b7f89f661557"}, + {file = "websockets-13.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc"}, + {file = "websockets-13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49"}, + {file = "websockets-13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6"}, + {file = "websockets-13.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14"}, + {file = "websockets-13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf"}, + {file = "websockets-13.1-cp312-cp312-win32.whl", hash = "sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c"}, + {file = "websockets-13.1-cp312-cp312-win_amd64.whl", hash = "sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3"}, + {file = 
"websockets-13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6"}, + {file = "websockets-13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708"}, + {file = "websockets-13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f"}, + {file = "websockets-13.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2"}, + {file = "websockets-13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6"}, + {file = "websockets-13.1-cp313-cp313-win32.whl", hash = "sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d"}, + {file = "websockets-13.1-cp313-cp313-win_amd64.whl", hash = "sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2"}, + {file = "websockets-13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9b37c184f8b976f0c0a231a5f3d6efe10807d41ccbe4488df8c74174805eea7d"}, + {file = "websockets-13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:163e7277e1a0bd9fb3c8842a71661ad19c6aa7bb3d6678dc7f89b17fbcc4aeb7"}, + {file = "websockets-13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b889dbd1342820cc210ba44307cf75ae5f2f96226c0038094455a96e64fb07a"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:586a356928692c1fed0eca68b4d1c2cbbd1ca2acf2ac7e7ebd3b9052582deefa"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bd6abf1e070a6b72bfeb71049d6ad286852e285f146682bf30d0296f5fbadfa"}, + {file = "websockets-13.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2aad13a200e5934f5a6767492fb07151e1de1d6079c003ab31e1823733ae79"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:df01aea34b6e9e33572c35cd16bae5a47785e7d5c8cb2b54b2acdb9678315a17"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e54affdeb21026329fb0744ad187cf812f7d3c2aa702a5edb562b325191fcab6"}, + {file = "websockets-13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ef8aa8bdbac47f4968a5d66462a2a0935d044bf35c0e5a8af152d58516dbeb5"}, + {file = "websockets-13.1-cp39-cp39-win32.whl", hash = "sha256:deeb929efe52bed518f6eb2ddc00cc496366a14c726005726ad62c2dd9017a3c"}, + {file = "websockets-13.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c65ffa900e7cc958cd088b9a9157a8141c991f8c53d11087e6fb7277a03f81d"}, + {file = "websockets-13.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5dd6da9bec02735931fccec99d97c29f47cc61f644264eb995ad6c0c27667238"}, + {file = "websockets-13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2510c09d8e8df777177ee3d40cd35450dc169a81e747455cc4197e63f7e7bfe5"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c3cf67185543730888b20682fb186fc8d0fa6f07ccc3ef4390831ab4b388d9"}, + 
{file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcc03c8b72267e97b49149e4863d57c2d77f13fae12066622dc78fe322490fe6"}, + {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004280a140f220c812e65f36944a9ca92d766b6cc4560be652a0a3883a79ed8a"}, + {file = "websockets-13.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2620453c075abeb0daa949a292e19f56de518988e079c36478bacf9546ced23"}, + {file = "websockets-13.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:25c35bf84bf7c7369d247f0b8cfa157f989862c49104c5cf85cb5436a641d93e"}, + {file = "websockets-13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:83f91d8a9bb404b8c2c41a707ac7f7f75b9442a0a876df295de27251a856ad09"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a43cfdcddd07f4ca2b1afb459824dd3c6d53a51410636a2c7fc97b9a8cf4842"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a2ef1381632a2f0cb4efeff34efa97901c9fbc118e01951ad7cfc10601a9bb"}, + {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bf774c754c35dbb487360b12c5727adab887f1622b8aed5755880a21c4a20"}, + {file = "websockets-13.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:95858ca14a9f6fa8413d29e0a585b31b278388aa775b8a81fa24830123874678"}, + {file = "websockets-13.1-py3-none-any.whl", hash = "sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f"}, + {file = "websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878"}, +] + +[[package]] +name = "zipp" +version = "3.21.0" +requires_python = ">=3.9" +summary = "Backport of pathlib-compatible object 
wrapper for zip files" +groups = ["tests"] +marker = "python_version < \"3.10\"" +files = [ + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, +] diff --git a/madsci/madsci_common/pyproject.toml b/madsci/madsci_common/pyproject.toml index 0993bcc..875364c 100644 --- a/madsci/madsci_common/pyproject.toml +++ b/madsci/madsci_common/pyproject.toml @@ -1,41 +1,41 @@ -[project] -name = "madsci.common" -dynamic = ["version"] -description = "The Modular Autonomous Discovery for Science (MADSci) Common Definitions and Utilities." -authors = [ - {name = "Tobias Ginsburg", email = "tginsburg@anl.gov"}, - {name = "Ryan D. Lewis", email = "ryan.lewis@anl.gov"}, - {name = "Casey Stone", email = "cstone@anl.gov"}, - {name = "Doga Ozgulbas", email = "dozgulbas@anl.gov"}, -] -requires-python = ">=3.9.1" -readme = "README.md" -license = {text = "MIT"} -dependencies = [ - "pydantic>=2.9.2", - "PyYAML>=6.0.2", - "sqlmodel>=0.0.22", - "python-ulid[pydantic]>=3.0.0", - "aenum>=3.1.15", - "python-dotenv>=1.0.1", - "requests>=2.32.3", -] - -[dependency-groups] -server = ["fastapi>=0.115.4", "uvicorn[standard]>=0.32.0", "python-multipart>=0.0.17"] -tests = ["ipykernel>=6.29.5"] - -[project.urls] -Homepage = "https://github.com/AD-SDL/MADSci" - -###################### -# Build Info + Tools # -###################### - -[build-system] -requires = ["pdm-backend"] -build-backend = "pdm.backend" - -[tool.pdm.version] -source = "scm" -fallback_version = "0.0.0" +[project] +name = "madsci.common" +dynamic = ["version"] +description = "The Modular Autonomous Discovery for Science (MADSci) Common Definitions and Utilities." +authors = [ + {name = "Tobias Ginsburg", email = "tginsburg@anl.gov"}, + {name = "Ryan D. 
Lewis", email = "ryan.lewis@anl.gov"}, + {name = "Casey Stone", email = "cstone@anl.gov"}, + {name = "Doga Ozgulbas", email = "dozgulbas@anl.gov"}, +] +requires-python = ">=3.9.1" +readme = "README.md" +license = {text = "MIT"} +dependencies = [ + "pydantic>=2.9.2", + "PyYAML>=6.0.2", + "sqlmodel>=0.0.22", + "python-ulid[pydantic]>=3.0.0", + "aenum>=3.1.15", + "python-dotenv>=1.0.1", + "requests>=2.32.3", +] + +[dependency-groups] +server = ["fastapi>=0.115.4", "uvicorn[standard]>=0.32.0", "python-multipart>=0.0.17"] +tests = ["ipykernel>=6.29.5"] + +[project.urls] +Homepage = "https://github.com/AD-SDL/MADSci" + +###################### +# Build Info + Tools # +###################### + +[build-system] +requires = ["pdm-backend"] +build-backend = "pdm.backend" + +[tool.pdm.version] +source = "scm" +fallback_version = "0.0.0" diff --git a/madsci/madsci_module/madsci/module/__init__.py b/madsci/madsci_module/madsci/module/__init__.py index 6f165a5..150ad8d 100644 --- a/madsci/madsci_module/madsci/module/__init__.py +++ b/madsci/madsci_module/madsci/module/__init__.py @@ -1 +1 @@ -"""Package for MADSci Node and Node Module helper classes.""" +"""Package for MADSci Node and Node Module helper classes.""" diff --git a/madsci/madsci_module/madsci/module/abstract_module.py b/madsci/madsci_module/madsci/module/abstract_module.py index 9f03701..ae5703c 100644 --- a/madsci/madsci_module/madsci/module/abstract_module.py +++ b/madsci/madsci_module/madsci/module/abstract_module.py @@ -1,580 +1,580 @@ -"""Base Node Module helper classes.""" - -import inspect -import threading -import time -import traceback -from pathlib import Path -from typing import Any, Callable, ClassVar, Optional, Union, get_type_hints - -from pydantic import ValidationError -from rich import print - -from madsci.common.definition_loaders import ( - node_definition_loader, -) -from madsci.common.events import MADSciEventLogger -from madsci.common.exceptions import ( - ActionMissingArgumentError, - 
ActionMissingFileError, - ActionNotImplementedError, -) -from madsci.common.types.action_types import ( - ActionArgumentDefinition, - ActionDefinition, - ActionFileDefinition, - ActionRequest, - ActionResult, - ActionStatus, -) -from madsci.common.types.admin_command_types import AdminCommandResponse -from madsci.common.types.base_types import Error -from madsci.common.types.event_types import Event -from madsci.common.types.module_types import ( - AdminCommands, - NodeModuleDefinition, -) -from madsci.common.types.node_types import ( - NodeDefinition, - NodeInfo, - NodeSetConfigResponse, - NodeStatus, -) -from madsci.common.utils import pretty_type_repr, threaded_daemon, threaded_task - - -def action( - func: Callable, - name: Optional[str] = None, - description: Optional[str] = None, - blocking: bool = False, -) -> Callable: - """Decorator to mark a method as an action handler.""" - func.__is_madsci_action__ = True - - # *Use provided action_name or function name - if name is None: - name = func.__name__ - # * Use provided description or function docstring - if description is None: - description = func.__doc__ - func.__madsci_action_name__ = name - func.__madsci_action_description__ = description - func.__madsci_action_blocking__ = blocking - return func - - -class AbstractNode: - """ - Base Node implementation, protocol agnostic, all node class definitions should inherit from or be based on this. - - Note that this class is abstract: it is intended to be inherited from, not used directly. 
- """ - - module_definition: ClassVar[NodeModuleDefinition] = None - """The module definition.""" - node_definition: ClassVar[NodeDefinition] = None - """The node definition.""" - config: ClassVar[dict[str, Any]] = {} - """The configuration of the module.""" - node_status: ClassVar[NodeStatus] = NodeStatus( - initializing=True, - ) - """The status of the module.""" - node_state: ClassVar[dict[str, Any]] = {} - """The state of the module.""" - action_handlers: ClassVar[dict[str, callable]] = {} - """The handlers for the actions that the module can perform.""" - action_history: ClassVar[dict[str, ActionResult]] = {} - """The history of the actions that the module has performed.""" - status_update_interval: ClassVar[float] = 5.0 - """The interval at which the status handler is called. Overridable by config.""" - state_update_interval: ClassVar[float] = 5.0 - """The interval at which the state handler is called. Overridable by config.""" - node_info_path: ClassVar[Optional[Path]] = None - """The path to the node info file. 
If unset, defaults to '.info.yaml'""" - logger: ClassVar[MADSciEventLogger] = MADSciEventLogger() - """The event logger for this node""" - - def __init__(self) -> "AbstractNode": - """Initialize the module class.""" - (self.node_definition, self.module_definition, self.config) = ( - node_definition_loader() - ) - if self.node_definition is None: - raise ValueError("Node definition not found, aborting node initialization") - if self.module_definition is None: - raise ValueError( - "Module definition not found, aborting node initialization", - ) - - # * Synthesize the node info - self.node_info = NodeInfo.from_node_and_module( - self.node_definition, - self.module_definition, - self.config, - ) - - # * Add the admin commands to the node info - self.node_info.capabilities.admin_commands = set.union( - self.node_info.capabilities.admin_commands, - { - admin_command.value - for admin_command in AdminCommands - if hasattr(self, admin_command.value) - and callable(self.__getattribute__(admin_command.value)) - }, - ) - # * Add the action decorators to the node - for action_callable in self.__class__.__dict__.values(): - if hasattr(action_callable, "__is_madsci_action__"): - self._add_action( - func=action_callable, - action_name=action_callable.__madsci_action_name__, - description=action_callable.__madsci_action_description__, - blocking=action_callable.__madsci_action_blocking__, - ) - - # * Save the node info - if self.node_info_path: - self.node_info.to_yaml(self.node_info_path) - elif self.node_definition._definition_path: - self.node_info_path = Path( - self.node_definition._definition_path, - ).with_suffix(".info.yaml") - self.node_info.to_yaml(self.node_info_path, exclude={"config_values"}) - - # * Add a lock for thread safety with blocking actions - self._action_lock = threading.Lock() - - """------------------------------------------------------------------------------------------------""" - """Node Lifecycle and Public Methods""" - 
"""------------------------------------------------------------------------------------------------""" - - def start_node(self, config: dict[str, Any] = {}) -> None: - """Called once to start the node.""" - if self.module_definition._definition_path: - self.module_definition.to_yaml(self.module_definition._definition_path) - else: - print( - "No definition path set for module, skipping module definition update", - ) - if self.node_definition._definition_path: - self.node_definition.to_yaml(self.node_definition._definition_path) - else: - print("No definition path set for node, skipping node definition update") - - # *Check for any required config parameters that weren't set - self.config = {**self.config, **config} - for config_value in self.node_definition.config.values(): - if ( - config_value.required - and ( - config_value.name not in self.config - or self.config[config_value.name] is None - ) - and config_value.default is None - ): - print(f"Required config parameter '{config_value.name}' not set") - self.node_status.waiting_for_config.add(config_value.name) - else: - self.node_status.waiting_for_config.discard(config_value.name) - - def status_handler(self) -> None: - """Called periodically to update the node status. Should set `self.node_status`""" - - def state_handler(self) -> None: - """Called periodically to update the node state. Should set `self.node_state`""" - - def startup_handler(self) -> None: - """Called to (re)initialize the node. Should be used to open connections to devices or initialize any other resources.""" - - def shutdown_handler(self) -> None: - """Called to shut down the node. 
Should be used to clean up any resources.""" - - """------------------------------------------------------------------------------------------------""" - """Interface Methods""" - """------------------------------------------------------------------------------------------------""" - - def get_action_history(self) -> list[str]: - """Get the action history of the module.""" - return list(self.action_history.keys()) - - def run_action(self, action_request: ActionRequest) -> ActionResult: - """Run an action on the module.""" - self.node_status.running_actions.add(action_request.action_id) - action_response = None - arg_dict = {} - try: - arg_dict = self._parse_action_args(action_request) - self._check_required_args(action_request) - except Exception as e: - self._exception_handler(e, set_node_error=False) - action_response = action_request.failed(errors=Error.from_exception(e)) - try: - self._run_action(action_request, arg_dict) - except Exception as e: - self._exception_handler(e) - action_response = action_request.failed(errors=Error.from_exception(e)) - else: - if action_response is None: - # * Assume success if no return value and no exception - action_response = action_request.succeeded() - elif not isinstance(action_response, ActionResult): - try: - action_response = ActionResult.model_validate(action_response) - except ValidationError as e: - action_response = action_request.failed( - errors=Error.from_exception(e), - ) - finally: - self.node_status.running_actions.discard(action_request.action_id) - self.action_history[action_request.action_id] = action_response - return action_response - - def get_action_result(self, action_id: str) -> ActionResult: - """Get the status of an action on the module.""" - if action_id in self.action_history: - return self.action_history[action_id] - return ActionResult( - status=ActionStatus.FAILED, - errors=Error( - message=f"Action with id '{action_id}' not found", - error_type="ActionNotFound", - ), - ) - - def 
get_status(self) -> NodeStatus: - """Get the status of the module.""" - return self.node_status - - def set_config(self, new_config: dict[str, Any]) -> NodeSetConfigResponse: - """Set configuration values of the module.""" - need_reset = False - errors = [] - for config_key, config_value in new_config.items(): - try: - if config_key in self.node_definition.config: - self.config[config_key] = config_value - else: - raise ValueError(f"Invalid config parameter: {config_key}") - if self.node_definition.config[config_key].reset_on_change: - need_reset = True - except Exception as e: - errors.append(Error.from_exception(e)) - # *Check if all required parameters are set - for param in self.node_definition.config.values(): - if param.required and ( - param.name not in self.config or self.config[param.name] is None - ): - self.node_status.waiting_for_config.add(param.name) - else: - self.node_status.waiting_for_config.discard(param.name) - if need_reset and hasattr(self, "reset"): - # * Reset after a short delay to allow the response to be returned - @threaded_task - def schedule_reset() -> None: - time.sleep(2) - self.reset() - - schedule_reset() - return NodeSetConfigResponse( - success=len(errors) == 0, - errors=errors, - ) - - def run_admin_command(self, admin_command: AdminCommands) -> AdminCommandResponse: - """Run the specified administrative command on the module.""" - if self.hasattr(admin_command) and callable( - self.__getattribute__(admin_command), - ): - try: - response = self.__getattribute__(admin_command)() - if response is None: - # * Assume success if no return value - response = True - return AdminCommandResponse( - success=True, - errors=[], - ) - if isinstance(response, bool): - return AdminCommandResponse( - success=response, - errors=[], - ) - if isinstance(response, AdminCommandResponse): - return response - raise ValueError( - f"Admin command {admin_command} returned an unexpected value: {response}", - ) - except Exception as e: - 
self._exception_handler(e) - return AdminCommandResponse( - success=False, - errors=[Error.from_exception(e)], - ) - else: - return AdminCommandResponse( - success=False, - errors=[ - Error( - message=f"Admin command {admin_command} not implemented by this module", - error_type="AdminCommandNotImplemented", - ), - ], - ) - - def get_info(self) -> NodeInfo: - """Get information about the module.""" - return self.node_info - - def get_state(self) -> dict[str, Any]: - """Get the state of the module.""" - return self.node_state - - def get_log(self) -> list[Event]: - """Return the log of the node""" - return self.logger.get_log() - - """------------------------------------------------------------------------------------------------""" - """Internal and Private Methods""" - """------------------------------------------------------------------------------------------------""" - - def _add_action( - self, - func: Callable, - action_name: str, - description: str, - blocking: bool = False, - ) -> None: - """Add an action to the module. 
- - Args: - func: The function to add as an action handler - action_name: The name of the action - description: The description of the action - blocking: Whether this action blocks other actions while running - """ - # *Register the action handler - self.action_handlers[action_name] = func - - action_def = ActionDefinition( - name=action_name, - description=description, - blocking=blocking, - args=[], - files=[], - ) - # *Create basic action definition from function signature - signature = inspect.signature(func) - if signature.parameters: - for parameter_name, parameter_type in get_type_hints( - func, - include_extras=True, - ).items(): - if parameter_name == "return": - continue - if ( - parameter_name not in action_def.args - and parameter_name not in [file.name for file in action_def.files] - and parameter_name != "action" - ): - type_hint = parameter_type - description = "" - annotated_as_file = False - annotated_as_arg = False - # * If the type hint is an Annotated type, extract the type and description - # * Description here means the first string metadata in the Annotated type - if type_hint.__name__ == "Annotated": - type_hint = get_type_hints(func, include_extras=False)[ - parameter_name - ] - description = next( - ( - metadata - for metadata in parameter_type.__metadata__ - if isinstance(metadata, str) - ), - "", - ) - annotated_as_file = any( - isinstance(metadata, ActionFileDefinition) - for metadata in parameter_type.__metadata__ - ) - annotated_as_arg = not any( - isinstance(metadata, ActionArgumentDefinition) - for metadata in parameter_type.__metadata__ - ) - if annotated_as_file and annotated_as_arg: - raise ValueError( - f"Parameter '{parameter_name}' is annotated as both a file and an argument. 
This is not allowed.", - ) - if annotated_as_file or ( - type_hint.__name__ - in ["Path", "PurePath", "PosixPath", "WindowsPath"] - and not annotated_as_arg - ): - # * Add a file parameter to the action - action_def.files[parameter_name] = ActionFileDefinition( - name=parameter_name, - required=True, - description=description, - ) - else: - parameter_info = signature.parameters[parameter_name] - # * Add an arg to the action - default = ( - None - if parameter_info.default == inspect.Parameter.empty - else parameter_info.default - ) - - action_def.args[parameter_name] = ActionArgumentDefinition( - name=parameter_name, - type=pretty_type_repr(type_hint), - default=default, - required=default is None, - description=description, - ) - self.node_info.actions[action_name] = action_def - - def _parse_action_args( - self, - action_request: ActionRequest, - ) -> Union[ActionResult, tuple[callable, dict[str, Any]]]: - """Run an action on the module.""" - action_callable = self.action_handlers.get(action_request.action_name, None) - if action_callable is None: - raise ActionNotImplementedError( - f"Action {action_request.action_name} not implemented by this module", - ) - # * Prepare arguments for the action function. - # * If the action function has a 'state' or 'action' parameter - # * we'll pass in our state and action objects. 
- arg_dict = {} - parameters = inspect.signature(action_callable).parameters - if parameters.__contains__("action"): - arg_dict["action"] = action_request - if parameters.__contains__("self"): - arg_dict["self"] = self - if list(parameters.values())[-1].kind == inspect.Parameter.VAR_KEYWORD: - # * Function has **kwargs, so we can pass all action args and files - arg_dict = {**arg_dict, **action_request.args} - arg_dict = { - **arg_dict, - **{file.filename: file.file for file in action_request.files}, - } - else: - # * Pass only explicit arguments, dropping extras - for arg_name, arg_value in action_request.args.items(): - if arg_name in parameters: - arg_dict[arg_name] = arg_value - else: - self.logger.log_info(f"Ignoring unexpected argument {arg_name}") - for file in action_request.files: - if file.filename in parameters: - arg_dict[file.filename] = file - else: - self.logger.log_info(f"Ignoring unexpected file {file.filename}") - return arg_dict - - def _check_required_args( - self, - action_request: ActionRequest, - ) -> None: - for arg in self.node_info.actions[action_request.action_name].args.values(): - if arg.name not in action_request.args and arg.required: - raise ActionMissingArgumentError( - f"Missing required argument '{arg.name}'", - ) - for file in self.node_info.actions[action_request.action_name].files.values(): - if ( - not any( - arg_file.filename == file.name for arg_file in action_request.files - ) - and file.required - ): - raise ActionMissingFileError(f"Missing required file '{file.name}'") - - def _run_action( - self, - action_request: ActionRequest, - arg_dict: dict[str, Any], - ) -> ActionResult: - action_callable = self.action_handlers.get(action_request.action_name, None) - # * Perform the action here and return result - if not self.node_status.ready: - return action_request.not_ready( - error=Error( - message=f"Module is not ready: {self.node_status.description}", - error_type="ModuleNotReady", - ), - ) - self._action_lock.acquire() - 
try: - # * If the action is marked as blocking, set the module status to not ready for the duration of the action, otherwise release the lock immediately - if self.node_info.actions[action_request.action_name].blocking: - self.node_status.busy = True - try: - result = action_callable(**arg_dict) - except Exception as e: - self._exception_handler(e) - result = action_request.failed(errors=Error.from_exception(e)) - finally: - self.node_status.busy = False - else: - if self._action_lock.locked(): - self._action_lock.release() - try: - result = action_callable(**arg_dict) - except Exception as e: - self._exception_handler(e) - result = action_request.failed(errors=Error.from_exception(e)) - finally: - if self._action_lock.locked(): - self._action_lock.release() - if isinstance(result, ActionResult): - # * Make sure the action ID is set correctly on the result - result.action_id = action_request.action_id - return result - if result is None: - # *Assume success if no return value and no exception - return action_request.succeeded() - # * Return a failure if the action returns something unexpected - return action_request.failed( - errors=Error( - message=f"Action '{action_request.action_name}' returned an unexpected value: {result}", - ), - ) - - def _exception_handler(self, e: Exception, set_node_error: bool = True) -> None: - """Handle an exception.""" - self.node_status.errored = set_node_error - self.node_status.errors.append(Error.from_exception(e)) - traceback.print_exc() - - @threaded_daemon - def _loop_handler(self) -> None: - """Handles calling periodic handlers, like the status and state handlers""" - last_status_update = 0.0 - last_state_update = 0.0 - while True: - try: - status_update_interval = self.config.get( - "status_update_interval", - self.status_update_interval, - ) - if time.time() - last_status_update > status_update_interval: - last_status_update = time.time() - self.status_handler() - state_update_interval = self.config.get( - 
"state_update_interval", - self.state_update_interval, - ) - if time.time() - last_state_update > state_update_interval: - last_state_update = time.time() - self.state_handler() - time.sleep(0.1) - except Exception as e: - self._exception_handler(e) - time.sleep(0.1) +"""Base Node Module helper classes.""" + +import inspect +import threading +import time +import traceback +from pathlib import Path +from typing import Any, Callable, ClassVar, Optional, Union, get_type_hints + +from pydantic import ValidationError +from rich import print + +from madsci.common.definition_loaders import ( + node_definition_loader, +) +from madsci.common.events import MADSciEventLogger +from madsci.common.exceptions import ( + ActionMissingArgumentError, + ActionMissingFileError, + ActionNotImplementedError, +) +from madsci.common.types.action_types import ( + ActionArgumentDefinition, + ActionDefinition, + ActionFileDefinition, + ActionRequest, + ActionResult, + ActionStatus, +) +from madsci.common.types.admin_command_types import AdminCommandResponse +from madsci.common.types.base_types import Error +from madsci.common.types.event_types import Event +from madsci.common.types.module_types import ( + AdminCommands, + NodeModuleDefinition, +) +from madsci.common.types.node_types import ( + NodeDefinition, + NodeInfo, + NodeSetConfigResponse, + NodeStatus, +) +from madsci.common.utils import pretty_type_repr, threaded_daemon, threaded_task + + +def action( + func: Callable, + name: Optional[str] = None, + description: Optional[str] = None, + blocking: bool = False, +) -> Callable: + """Decorator to mark a method as an action handler.""" + func.__is_madsci_action__ = True + + # *Use provided action_name or function name + if name is None: + name = func.__name__ + # * Use provided description or function docstring + if description is None: + description = func.__doc__ + func.__madsci_action_name__ = name + func.__madsci_action_description__ = description + func.__madsci_action_blocking__ = 
blocking + return func + + +class AbstractNode: + """ + Base Node implementation, protocol agnostic, all node class definitions should inherit from or be based on this. + + Note that this class is abstract: it is intended to be inherited from, not used directly. + """ + + module_definition: ClassVar[NodeModuleDefinition] = None + """The module definition.""" + node_definition: ClassVar[NodeDefinition] = None + """The node definition.""" + config: ClassVar[dict[str, Any]] = {} + """The configuration of the module.""" + node_status: ClassVar[NodeStatus] = NodeStatus( + initializing=True, + ) + """The status of the module.""" + node_state: ClassVar[dict[str, Any]] = {} + """The state of the module.""" + action_handlers: ClassVar[dict[str, callable]] = {} + """The handlers for the actions that the module can perform.""" + action_history: ClassVar[dict[str, ActionResult]] = {} + """The history of the actions that the module has performed.""" + status_update_interval: ClassVar[float] = 5.0 + """The interval at which the status handler is called. Overridable by config.""" + state_update_interval: ClassVar[float] = 5.0 + """The interval at which the state handler is called. Overridable by config.""" + node_info_path: ClassVar[Optional[Path]] = None + """The path to the node info file. 
If unset, defaults to '.info.yaml'""" + logger: ClassVar[MADSciEventLogger] = MADSciEventLogger() + """The event logger for this node""" + + def __init__(self) -> "AbstractNode": + """Initialize the module class.""" + (self.node_definition, self.module_definition, self.config) = ( + node_definition_loader() + ) + if self.node_definition is None: + raise ValueError("Node definition not found, aborting node initialization") + if self.module_definition is None: + raise ValueError( + "Module definition not found, aborting node initialization", + ) + + # * Synthesize the node info + self.node_info = NodeInfo.from_node_and_module( + self.node_definition, + self.module_definition, + self.config, + ) + + # * Add the admin commands to the node info + self.node_info.capabilities.admin_commands = set.union( + self.node_info.capabilities.admin_commands, + { + admin_command.value + for admin_command in AdminCommands + if hasattr(self, admin_command.value) + and callable(self.__getattribute__(admin_command.value)) + }, + ) + # * Add the action decorators to the node + for action_callable in self.__class__.__dict__.values(): + if hasattr(action_callable, "__is_madsci_action__"): + self._add_action( + func=action_callable, + action_name=action_callable.__madsci_action_name__, + description=action_callable.__madsci_action_description__, + blocking=action_callable.__madsci_action_blocking__, + ) + + # * Save the node info + if self.node_info_path: + self.node_info.to_yaml(self.node_info_path) + elif self.node_definition._definition_path: + self.node_info_path = Path( + self.node_definition._definition_path, + ).with_suffix(".info.yaml") + self.node_info.to_yaml(self.node_info_path, exclude={"config_values"}) + + # * Add a lock for thread safety with blocking actions + self._action_lock = threading.Lock() + + """------------------------------------------------------------------------------------------------""" + """Node Lifecycle and Public Methods""" + 
"""------------------------------------------------------------------------------------------------""" + + def start_node(self, config: dict[str, Any] = {}) -> None: + """Called once to start the node.""" + if self.module_definition._definition_path: + self.module_definition.to_yaml(self.module_definition._definition_path) + else: + print( + "No definition path set for module, skipping module definition update", + ) + if self.node_definition._definition_path: + self.node_definition.to_yaml(self.node_definition._definition_path) + else: + print("No definition path set for node, skipping node definition update") + + # *Check for any required config parameters that weren't set + self.config = {**self.config, **config} + for config_value in self.node_definition.config.values(): + if ( + config_value.required + and ( + config_value.name not in self.config + or self.config[config_value.name] is None + ) + and config_value.default is None + ): + print(f"Required config parameter '{config_value.name}' not set") + self.node_status.waiting_for_config.add(config_value.name) + else: + self.node_status.waiting_for_config.discard(config_value.name) + + def status_handler(self) -> None: + """Called periodically to update the node status. Should set `self.node_status`""" + + def state_handler(self) -> None: + """Called periodically to update the node state. Should set `self.node_state`""" + + def startup_handler(self) -> None: + """Called to (re)initialize the node. Should be used to open connections to devices or initialize any other resources.""" + + def shutdown_handler(self) -> None: + """Called to shut down the node. 
Should be used to clean up any resources.""" + + """------------------------------------------------------------------------------------------------""" + """Interface Methods""" + """------------------------------------------------------------------------------------------------""" + + def get_action_history(self) -> list[str]: + """Get the action history of the module.""" + return list(self.action_history.keys()) + + def run_action(self, action_request: ActionRequest) -> ActionResult: + """Run an action on the module.""" + self.node_status.running_actions.add(action_request.action_id) + action_response = None + arg_dict = {} + try: + arg_dict = self._parse_action_args(action_request) + self._check_required_args(action_request) + except Exception as e: + self._exception_handler(e, set_node_error=False) + action_response = action_request.failed(errors=Error.from_exception(e)) + try: + self._run_action(action_request, arg_dict) + except Exception as e: + self._exception_handler(e) + action_response = action_request.failed(errors=Error.from_exception(e)) + else: + if action_response is None: + # * Assume success if no return value and no exception + action_response = action_request.succeeded() + elif not isinstance(action_response, ActionResult): + try: + action_response = ActionResult.model_validate(action_response) + except ValidationError as e: + action_response = action_request.failed( + errors=Error.from_exception(e), + ) + finally: + self.node_status.running_actions.discard(action_request.action_id) + self.action_history[action_request.action_id] = action_response + return action_response + + def get_action_result(self, action_id: str) -> ActionResult: + """Get the status of an action on the module.""" + if action_id in self.action_history: + return self.action_history[action_id] + return ActionResult( + status=ActionStatus.FAILED, + errors=Error( + message=f"Action with id '{action_id}' not found", + error_type="ActionNotFound", + ), + ) + + def 
get_status(self) -> NodeStatus: + """Get the status of the module.""" + return self.node_status + + def set_config(self, new_config: dict[str, Any]) -> NodeSetConfigResponse: + """Set configuration values of the module.""" + need_reset = False + errors = [] + for config_key, config_value in new_config.items(): + try: + if config_key in self.node_definition.config: + self.config[config_key] = config_value + else: + raise ValueError(f"Invalid config parameter: {config_key}") + if self.node_definition.config[config_key].reset_on_change: + need_reset = True + except Exception as e: + errors.append(Error.from_exception(e)) + # *Check if all required parameters are set + for param in self.node_definition.config.values(): + if param.required and ( + param.name not in self.config or self.config[param.name] is None + ): + self.node_status.waiting_for_config.add(param.name) + else: + self.node_status.waiting_for_config.discard(param.name) + if need_reset and hasattr(self, "reset"): + # * Reset after a short delay to allow the response to be returned + @threaded_task + def schedule_reset() -> None: + time.sleep(2) + self.reset() + + schedule_reset() + return NodeSetConfigResponse( + success=len(errors) == 0, + errors=errors, + ) + + def run_admin_command(self, admin_command: AdminCommands) -> AdminCommandResponse: + """Run the specified administrative command on the module.""" + if self.hasattr(admin_command) and callable( + self.__getattribute__(admin_command), + ): + try: + response = self.__getattribute__(admin_command)() + if response is None: + # * Assume success if no return value + response = True + return AdminCommandResponse( + success=True, + errors=[], + ) + if isinstance(response, bool): + return AdminCommandResponse( + success=response, + errors=[], + ) + if isinstance(response, AdminCommandResponse): + return response + raise ValueError( + f"Admin command {admin_command} returned an unexpected value: {response}", + ) + except Exception as e: + 
self._exception_handler(e) + return AdminCommandResponse( + success=False, + errors=[Error.from_exception(e)], + ) + else: + return AdminCommandResponse( + success=False, + errors=[ + Error( + message=f"Admin command {admin_command} not implemented by this module", + error_type="AdminCommandNotImplemented", + ), + ], + ) + + def get_info(self) -> NodeInfo: + """Get information about the module.""" + return self.node_info + + def get_state(self) -> dict[str, Any]: + """Get the state of the module.""" + return self.node_state + + def get_log(self) -> list[Event]: + """Return the log of the node""" + return self.logger.get_log() + + """------------------------------------------------------------------------------------------------""" + """Internal and Private Methods""" + """------------------------------------------------------------------------------------------------""" + + def _add_action( + self, + func: Callable, + action_name: str, + description: str, + blocking: bool = False, + ) -> None: + """Add an action to the module. 
+ + Args: + func: The function to add as an action handler + action_name: The name of the action + description: The description of the action + blocking: Whether this action blocks other actions while running + """ + # *Register the action handler + self.action_handlers[action_name] = func + + action_def = ActionDefinition( + name=action_name, + description=description, + blocking=blocking, + args=[], + files=[], + ) + # *Create basic action definition from function signature + signature = inspect.signature(func) + if signature.parameters: + for parameter_name, parameter_type in get_type_hints( + func, + include_extras=True, + ).items(): + if parameter_name == "return": + continue + if ( + parameter_name not in action_def.args + and parameter_name not in [file.name for file in action_def.files] + and parameter_name != "action" + ): + type_hint = parameter_type + description = "" + annotated_as_file = False + annotated_as_arg = False + # * If the type hint is an Annotated type, extract the type and description + # * Description here means the first string metadata in the Annotated type + if type_hint.__name__ == "Annotated": + type_hint = get_type_hints(func, include_extras=False)[ + parameter_name + ] + description = next( + ( + metadata + for metadata in parameter_type.__metadata__ + if isinstance(metadata, str) + ), + "", + ) + annotated_as_file = any( + isinstance(metadata, ActionFileDefinition) + for metadata in parameter_type.__metadata__ + ) + annotated_as_arg = not any( + isinstance(metadata, ActionArgumentDefinition) + for metadata in parameter_type.__metadata__ + ) + if annotated_as_file and annotated_as_arg: + raise ValueError( + f"Parameter '{parameter_name}' is annotated as both a file and an argument. 
This is not allowed.", + ) + if annotated_as_file or ( + type_hint.__name__ + in ["Path", "PurePath", "PosixPath", "WindowsPath"] + and not annotated_as_arg + ): + # * Add a file parameter to the action + action_def.files[parameter_name] = ActionFileDefinition( + name=parameter_name, + required=True, + description=description, + ) + else: + parameter_info = signature.parameters[parameter_name] + # * Add an arg to the action + default = ( + None + if parameter_info.default == inspect.Parameter.empty + else parameter_info.default + ) + + action_def.args[parameter_name] = ActionArgumentDefinition( + name=parameter_name, + type=pretty_type_repr(type_hint), + default=default, + required=default is None, + description=description, + ) + self.node_info.actions[action_name] = action_def + + def _parse_action_args( + self, + action_request: ActionRequest, + ) -> Union[ActionResult, tuple[callable, dict[str, Any]]]: + """Run an action on the module.""" + action_callable = self.action_handlers.get(action_request.action_name, None) + if action_callable is None: + raise ActionNotImplementedError( + f"Action {action_request.action_name} not implemented by this module", + ) + # * Prepare arguments for the action function. + # * If the action function has a 'state' or 'action' parameter + # * we'll pass in our state and action objects. 
+ arg_dict = {} + parameters = inspect.signature(action_callable).parameters + if parameters.__contains__("action"): + arg_dict["action"] = action_request + if parameters.__contains__("self"): + arg_dict["self"] = self + if list(parameters.values())[-1].kind == inspect.Parameter.VAR_KEYWORD: + # * Function has **kwargs, so we can pass all action args and files + arg_dict = {**arg_dict, **action_request.args} + arg_dict = { + **arg_dict, + **{file.filename: file.file for file in action_request.files}, + } + else: + # * Pass only explicit arguments, dropping extras + for arg_name, arg_value in action_request.args.items(): + if arg_name in parameters: + arg_dict[arg_name] = arg_value + else: + self.logger.log_info(f"Ignoring unexpected argument {arg_name}") + for file in action_request.files: + if file.filename in parameters: + arg_dict[file.filename] = file + else: + self.logger.log_info(f"Ignoring unexpected file {file.filename}") + return arg_dict + + def _check_required_args( + self, + action_request: ActionRequest, + ) -> None: + for arg in self.node_info.actions[action_request.action_name].args.values(): + if arg.name not in action_request.args and arg.required: + raise ActionMissingArgumentError( + f"Missing required argument '{arg.name}'", + ) + for file in self.node_info.actions[action_request.action_name].files.values(): + if ( + not any( + arg_file.filename == file.name for arg_file in action_request.files + ) + and file.required + ): + raise ActionMissingFileError(f"Missing required file '{file.name}'") + + def _run_action( + self, + action_request: ActionRequest, + arg_dict: dict[str, Any], + ) -> ActionResult: + action_callable = self.action_handlers.get(action_request.action_name, None) + # * Perform the action here and return result + if not self.node_status.ready: + return action_request.not_ready( + error=Error( + message=f"Module is not ready: {self.node_status.description}", + error_type="ModuleNotReady", + ), + ) + self._action_lock.acquire() + 
try: + # * If the action is marked as blocking, set the module status to not ready for the duration of the action, otherwise release the lock immediately + if self.node_info.actions[action_request.action_name].blocking: + self.node_status.busy = True + try: + result = action_callable(**arg_dict) + except Exception as e: + self._exception_handler(e) + result = action_request.failed(errors=Error.from_exception(e)) + finally: + self.node_status.busy = False + else: + if self._action_lock.locked(): + self._action_lock.release() + try: + result = action_callable(**arg_dict) + except Exception as e: + self._exception_handler(e) + result = action_request.failed(errors=Error.from_exception(e)) + finally: + if self._action_lock.locked(): + self._action_lock.release() + if isinstance(result, ActionResult): + # * Make sure the action ID is set correctly on the result + result.action_id = action_request.action_id + return result + if result is None: + # *Assume success if no return value and no exception + return action_request.succeeded() + # * Return a failure if the action returns something unexpected + return action_request.failed( + errors=Error( + message=f"Action '{action_request.action_name}' returned an unexpected value: {result}", + ), + ) + + def _exception_handler(self, e: Exception, set_node_error: bool = True) -> None: + """Handle an exception.""" + self.node_status.errored = set_node_error + self.node_status.errors.append(Error.from_exception(e)) + traceback.print_exc() + + @threaded_daemon + def _loop_handler(self) -> None: + """Handles calling periodic handlers, like the status and state handlers""" + last_status_update = 0.0 + last_state_update = 0.0 + while True: + try: + status_update_interval = self.config.get( + "status_update_interval", + self.status_update_interval, + ) + if time.time() - last_status_update > status_update_interval: + last_status_update = time.time() + self.status_handler() + state_update_interval = self.config.get( + 
"state_update_interval", + self.state_update_interval, + ) + if time.time() - last_state_update > state_update_interval: + last_state_update = time.time() + self.state_handler() + time.sleep(0.1) + except Exception as e: + self._exception_handler(e) + time.sleep(0.1) diff --git a/madsci/madsci_module/madsci/module/rest_module.py b/madsci/madsci_module/madsci/module/rest_module.py index dcb91e5..95961b8 100644 --- a/madsci/madsci_module/madsci/module/rest_module.py +++ b/madsci/madsci_module/madsci/module/rest_module.py @@ -1,327 +1,327 @@ -"""REST-based Node Module helper classes.""" - -import json -import shutil -import tempfile -import time -from collections.abc import Generator -from multiprocessing import Process -from pathlib import Path, PureWindowsPath -from threading import Thread -from typing import Any, Optional, Union -from zipfile import ZipFile - -from fastapi.applications import FastAPI -from fastapi.datastructures import UploadFile -from fastapi.routing import APIRouter -from rich import print -from starlette.responses import FileResponse - -from madsci.client.node.rest_node_client import RestNodeClient -from madsci.common.types.action_types import ActionRequest, ActionResult, ActionStatus -from madsci.common.types.admin_command_types import AdminCommandResponse -from madsci.common.types.base_types import Error, new_ulid_str -from madsci.common.types.event_types import Event -from madsci.common.types.module_types import ( - AdminCommands, - NodeCapabilities, -) -from madsci.common.types.node_types import ( - NodeInfo, - NodeSetConfigResponse, - NodeStatus, -) -from madsci.common.utils import threaded_task -from madsci.module.abstract_module import ( - AbstractNode, -) - - -def action_response_to_headers(action_response: ActionResult) -> dict[str, str]: - """Converts the response to a dictionary of headers""" - return { - "x-madsci-action-id": action_response.action_id, - "x-madsci-status": str(action_response.status), - "x-madsci-data": 
json.dumps(action_response.data), - "x-madsci-error": json.dumps(action_response.error), - "x-madsci-files": json.dumps(action_response.files), - } - - -def action_response_from_headers(headers: dict[str, Any]) -> ActionResult: - """Creates an ActionResult from the headers of a file response""" - - return ActionResult( - action_id=headers["x-madsci-action-id"], - status=ActionStatus(headers["x-wei-status"]), - errors=json.loads(headers["x-wei-error"]), - files=json.loads(headers["x-wei-files"]), - data=json.loads(headers["x-wei-data"]), - ) - - -class ActionResultWithFiles(FileResponse): - """Action response from a REST-based module.""" - - def from_action_response(self, action_response: ActionResult) -> ActionResult: - """Create an ActionResultWithFiles from an ActionResult.""" - if len(action_response.files) == 1: - return super().__init__( - path=next(iter(action_response.files.values())), - headers=action_response_to_headers(action_response), - ) - - with tempfile.NamedTemporaryFile( - suffix=".zip", - delete=False, - ) as temp_zipfile_path: - temp_zip = ZipFile(temp_zipfile_path, "w") - for file in action_response.files: - temp_zip.write(action_response.files[file]) - action_response.files[file] = str( - PureWindowsPath(action_response.files[file]).name, - ) - - return super().__init__( - path=temp_zipfile_path, - headers=action_response_to_headers(action_response), - ) - - -class RestNode(AbstractNode): - """REST-based node implementation and helper class. 
Inherit from this class to create a new REST-based node class.""" - - rest_api = None - """The REST API server for the node.""" - restart_flag = False - """Whether the node should restart the REST server.""" - exit_flag = False - """Whether the node should exit.""" - capabilities: NodeCapabilities = NodeCapabilities( - **RestNodeClient.supported_capabilities.model_dump(), - ) - - """------------------------------------------------------------------------------------------------""" - """Node Lifecycle and Public Methods""" - """------------------------------------------------------------------------------------------------""" - - def start_node(self, config: dict[str, Any] = {}) -> None: - """Start the node.""" - super().start_node(config) # *Kick off protocol agnostic-startup - self._start_rest_api() - - """------------------------------------------------------------------------------------------------""" - """Interface Methods""" - """------------------------------------------------------------------------------------------------""" - - def run_action( - self, - action_name: str, - args: Optional[str] = None, - files: list[UploadFile] = [], - action_id: Optional[str] = None, - ) -> Union[ActionResult, ActionResultWithFiles]: - """Run an action on the node.""" - if args: - args = json.loads(args) - if not isinstance(args, dict): - raise ValueError("args must be a JSON object") - else: - args = {} - with tempfile.TemporaryDirectory() as temp_dir: - # * Save the uploaded files to a temporary directory - for file in files: - with (Path(temp_dir) / file.filename).open("wb") as f: - shutil.copyfileobj(file.file, f) - response = super().run_action( - ActionRequest( - action_id=action_id if action_id else new_ulid_str(), - action_name=action_name, - args=args, - files={ - file.filename: Path(temp_dir) / file.filename for file in files - }, - ), - ) - # * Return a file response if there are files to be returned - if response.files: - return 
ActionResultWithFiles().from_action_response( - action_response=response, - ) - # * Otherwise, return a normal action response - return ActionResult.model_validate(response) - - def get_action_result( - self, - action_id: str, - ) -> Union[ActionResult, ActionResultWithFiles]: - """Get the status of an action on the node.""" - action_response = super().get_action_result(action_id) - if action_response.files: - return ActionResultWithFiles().from_action_response( - action_response=action_response, - ) - return ActionResult.model_validate(action_response) - - def get_action_history(self) -> list[str]: - """Get the action history of the node.""" - return super().get_action_history() - - def get_status(self) -> NodeStatus: - """Get the status of the node.""" - return super().get_status() - - def get_info(self) -> NodeInfo: - """Get information about the node.""" - return super().get_info() - - def get_state(self) -> dict[str, Any]: - """Get the state of the node.""" - return super().get_state() - - def get_log(self) -> list[Event]: - """Get the log of the node""" - return super().get_log() - - def set_config(self, config_key: str, config_value: Any) -> NodeSetConfigResponse: - """Set configuration values of the node.""" - return super().set_config(config_key, config_value) - - def run_admin_command(self, admin_command: AdminCommands) -> AdminCommandResponse: - """Perform an administrative command on the node.""" - return super().run_admin_command(admin_command) - - """------------------------------------------------------------------------------------------------""" - """Admin Commands""" - """------------------------------------------------------------------------------------------------""" - - def reset(self) -> AdminCommandResponse: - """Restart the node.""" - try: - self.restart_flag = True # * Restart the REST server - self.shutdown_handler() - self.startup_handler(self.config) - except Exception as exception: - return AdminCommandResponse( - success=False, - 
errors=[Error.from_exception(exception)], - ) - return AdminCommandResponse( - success=True, - errors=[], - ) - - def shutdown(self) -> AdminCommandResponse: - """Shutdown the node.""" - try: - self.restart_flag = False - - @threaded_task - def shutdown_server() -> None: - """Shutdown the REST server.""" - time.sleep(2) - self.rest_server_process.terminate() - self.exit_flag = True - - shutdown_server() - except Exception as exception: - return AdminCommandResponse( - success=False, - errors=[Error.from_exception(exception)], - ) - return AdminCommandResponse( - success=True, - errors=[], - ) - - """------------------------------------------------------------------------------------------------""" - """Internal and Private Methods""" - """------------------------------------------------------------------------------------------------""" - - def _start_rest_api(self) -> None: - """Start the REST API for the node.""" - import uvicorn - - self.rest_api = FastAPI(lifespan=self._lifespan) - self._configure_routes() - self.rest_server_process = Process( - target=uvicorn.run, - args=(self.rest_api,), - kwargs={"host": self.config["host"], "port": self.config["port"]}, - daemon=True, - ) - self.rest_server_process.start() - while True: - time.sleep(1) - if self.restart_flag: - self.rest_server_process.terminate() - self.restart_flag = False - self._start_rest_api() - break - if self.exit_flag: - break - - def _startup_thread(self) -> None: - """The startup thread for the REST API.""" - try: - # * Create a clean status and mark the node as initializing - self.node_status.initializing = True - self.node_status.errored = False - self.node_status.locked = False - self.node_status.paused = False - self.startup_handler() - except Exception as exception: - # * Handle any exceptions that occurred during startup - self._exception_handler(exception) - self.node_status.errored = True - finally: - # * Mark the node as no longer initializing - print("Startup complete") - 
self.node_status.initializing = False - - def _lifespan(self, app: FastAPI) -> Generator[None, None, None]: # noqa: ARG002 - """The lifespan of the REST API.""" - # * Run startup on a separate thread so it doesn't block the rest server from starting - # * (module won't accept actions until startup is complete) - Thread(target=self._startup_thread, daemon=True).start() - self._loop_handler() - - yield - - try: - # * Call any shutdown logic - self.shutdown_handler() - except Exception as exception: - # * If an exception occurs during shutdown, handle it so we at least see the error in logs/terminal - self._exception_handler(exception) - - def _configure_routes(self) -> None: - """Configure the routes for the REST API.""" - self.router = APIRouter() - self.router.add_api_route("/status", self.get_status, methods=["GET"]) - self.router.add_api_route("/info", self.get_info, methods=["GET"]) - self.router.add_api_route("/state", self.get_state, methods=["GET"]) - self.router.add_api_route( - "/action", - self.run_action, - methods=["POST"], - response_model=None, - ) - self.router.add_api_route( - "/action/{action_id}", - self.get_action_result, - methods=["GET"], - response_model=None, - ) - self.router.add_api_route("/action", self.get_action_history, methods=["GET"]) - self.router.add_api_route("/config", self.set_config, methods=["POST"]) - self.router.add_api_route( - "/admin/{admin_command}", - self.run_admin_command, - methods=["POST"], - ) - self.rest_api.include_router(self.router) - - -if __name__ == "__main__": - RestNode().start_node() +"""REST-based Node Module helper classes.""" + +import json +import shutil +import tempfile +import time +from collections.abc import Generator +from multiprocessing import Process +from pathlib import Path, PureWindowsPath +from threading import Thread +from typing import Any, Optional, Union +from zipfile import ZipFile + +from fastapi.applications import FastAPI +from fastapi.datastructures import UploadFile +from 
fastapi.routing import APIRouter +from rich import print +from starlette.responses import FileResponse + +from madsci.client.node.rest_node_client import RestNodeClient +from madsci.common.types.action_types import ActionRequest, ActionResult, ActionStatus +from madsci.common.types.admin_command_types import AdminCommandResponse +from madsci.common.types.base_types import Error, new_ulid_str +from madsci.common.types.event_types import Event +from madsci.common.types.module_types import ( + AdminCommands, + NodeCapabilities, +) +from madsci.common.types.node_types import ( + NodeInfo, + NodeSetConfigResponse, + NodeStatus, +) +from madsci.common.utils import threaded_task +from madsci.module.abstract_module import ( + AbstractNode, +) + + +def action_response_to_headers(action_response: ActionResult) -> dict[str, str]: + """Converts the response to a dictionary of headers""" + return { + "x-madsci-action-id": action_response.action_id, + "x-madsci-status": str(action_response.status), + "x-madsci-data": json.dumps(action_response.data), + "x-madsci-error": json.dumps(action_response.error), + "x-madsci-files": json.dumps(action_response.files), + } + + +def action_response_from_headers(headers: dict[str, Any]) -> ActionResult: + """Creates an ActionResult from the headers of a file response""" + + return ActionResult( + action_id=headers["x-madsci-action-id"], + status=ActionStatus(headers["x-wei-status"]), + errors=json.loads(headers["x-wei-error"]), + files=json.loads(headers["x-wei-files"]), + data=json.loads(headers["x-wei-data"]), + ) + + +class ActionResultWithFiles(FileResponse): + """Action response from a REST-based module.""" + + def from_action_response(self, action_response: ActionResult) -> ActionResult: + """Create an ActionResultWithFiles from an ActionResult.""" + if len(action_response.files) == 1: + return super().__init__( + path=next(iter(action_response.files.values())), + headers=action_response_to_headers(action_response), + ) + + with 
tempfile.NamedTemporaryFile( + suffix=".zip", + delete=False, + ) as temp_zipfile_path: + temp_zip = ZipFile(temp_zipfile_path, "w") + for file in action_response.files: + temp_zip.write(action_response.files[file]) + action_response.files[file] = str( + PureWindowsPath(action_response.files[file]).name, + ) + + return super().__init__( + path=temp_zipfile_path, + headers=action_response_to_headers(action_response), + ) + + +class RestNode(AbstractNode): + """REST-based node implementation and helper class. Inherit from this class to create a new REST-based node class.""" + + rest_api = None + """The REST API server for the node.""" + restart_flag = False + """Whether the node should restart the REST server.""" + exit_flag = False + """Whether the node should exit.""" + capabilities: NodeCapabilities = NodeCapabilities( + **RestNodeClient.supported_capabilities.model_dump(), + ) + + """------------------------------------------------------------------------------------------------""" + """Node Lifecycle and Public Methods""" + """------------------------------------------------------------------------------------------------""" + + def start_node(self, config: dict[str, Any] = {}) -> None: + """Start the node.""" + super().start_node(config) # *Kick off protocol agnostic-startup + self._start_rest_api() + + """------------------------------------------------------------------------------------------------""" + """Interface Methods""" + """------------------------------------------------------------------------------------------------""" + + def run_action( + self, + action_name: str, + args: Optional[str] = None, + files: list[UploadFile] = [], + action_id: Optional[str] = None, + ) -> Union[ActionResult, ActionResultWithFiles]: + """Run an action on the node.""" + if args: + args = json.loads(args) + if not isinstance(args, dict): + raise ValueError("args must be a JSON object") + else: + args = {} + with tempfile.TemporaryDirectory() as temp_dir: + # * Save the 
uploaded files to a temporary directory + for file in files: + with (Path(temp_dir) / file.filename).open("wb") as f: + shutil.copyfileobj(file.file, f) + response = super().run_action( + ActionRequest( + action_id=action_id if action_id else new_ulid_str(), + action_name=action_name, + args=args, + files={ + file.filename: Path(temp_dir) / file.filename for file in files + }, + ), + ) + # * Return a file response if there are files to be returned + if response.files: + return ActionResultWithFiles().from_action_response( + action_response=response, + ) + # * Otherwise, return a normal action response + return ActionResult.model_validate(response) + + def get_action_result( + self, + action_id: str, + ) -> Union[ActionResult, ActionResultWithFiles]: + """Get the status of an action on the node.""" + action_response = super().get_action_result(action_id) + if action_response.files: + return ActionResultWithFiles().from_action_response( + action_response=action_response, + ) + return ActionResult.model_validate(action_response) + + def get_action_history(self) -> list[str]: + """Get the action history of the node.""" + return super().get_action_history() + + def get_status(self) -> NodeStatus: + """Get the status of the node.""" + return super().get_status() + + def get_info(self) -> NodeInfo: + """Get information about the node.""" + return super().get_info() + + def get_state(self) -> dict[str, Any]: + """Get the state of the node.""" + return super().get_state() + + def get_log(self) -> list[Event]: + """Get the log of the node""" + return super().get_log() + + def set_config(self, config_key: str, config_value: Any) -> NodeSetConfigResponse: + """Set configuration values of the node.""" + return super().set_config(config_key, config_value) + + def run_admin_command(self, admin_command: AdminCommands) -> AdminCommandResponse: + """Perform an administrative command on the node.""" + return super().run_admin_command(admin_command) + + 
"""------------------------------------------------------------------------------------------------""" + """Admin Commands""" + """------------------------------------------------------------------------------------------------""" + + def reset(self) -> AdminCommandResponse: + """Restart the node.""" + try: + self.restart_flag = True # * Restart the REST server + self.shutdown_handler() + self.startup_handler(self.config) + except Exception as exception: + return AdminCommandResponse( + success=False, + errors=[Error.from_exception(exception)], + ) + return AdminCommandResponse( + success=True, + errors=[], + ) + + def shutdown(self) -> AdminCommandResponse: + """Shutdown the node.""" + try: + self.restart_flag = False + + @threaded_task + def shutdown_server() -> None: + """Shutdown the REST server.""" + time.sleep(2) + self.rest_server_process.terminate() + self.exit_flag = True + + shutdown_server() + except Exception as exception: + return AdminCommandResponse( + success=False, + errors=[Error.from_exception(exception)], + ) + return AdminCommandResponse( + success=True, + errors=[], + ) + + """------------------------------------------------------------------------------------------------""" + """Internal and Private Methods""" + """------------------------------------------------------------------------------------------------""" + + def _start_rest_api(self) -> None: + """Start the REST API for the node.""" + import uvicorn + + self.rest_api = FastAPI(lifespan=self._lifespan) + self._configure_routes() + self.rest_server_process = Process( + target=uvicorn.run, + args=(self.rest_api,), + kwargs={"host": self.config["host"], "port": self.config["port"]}, + daemon=True, + ) + self.rest_server_process.start() + while True: + time.sleep(1) + if self.restart_flag: + self.rest_server_process.terminate() + self.restart_flag = False + self._start_rest_api() + break + if self.exit_flag: + break + + def _startup_thread(self) -> None: + """The startup thread for the 
REST API.""" + try: + # * Create a clean status and mark the node as initializing + self.node_status.initializing = True + self.node_status.errored = False + self.node_status.locked = False + self.node_status.paused = False + self.startup_handler() + except Exception as exception: + # * Handle any exceptions that occurred during startup + self._exception_handler(exception) + self.node_status.errored = True + finally: + # * Mark the node as no longer initializing + print("Startup complete") + self.node_status.initializing = False + + def _lifespan(self, app: FastAPI) -> Generator[None, None, None]: # noqa: ARG002 + """The lifespan of the REST API.""" + # * Run startup on a separate thread so it doesn't block the rest server from starting + # * (module won't accept actions until startup is complete) + Thread(target=self._startup_thread, daemon=True).start() + self._loop_handler() + + yield + + try: + # * Call any shutdown logic + self.shutdown_handler() + except Exception as exception: + # * If an exception occurs during shutdown, handle it so we at least see the error in logs/terminal + self._exception_handler(exception) + + def _configure_routes(self) -> None: + """Configure the routes for the REST API.""" + self.router = APIRouter() + self.router.add_api_route("/status", self.get_status, methods=["GET"]) + self.router.add_api_route("/info", self.get_info, methods=["GET"]) + self.router.add_api_route("/state", self.get_state, methods=["GET"]) + self.router.add_api_route( + "/action", + self.run_action, + methods=["POST"], + response_model=None, + ) + self.router.add_api_route( + "/action/{action_id}", + self.get_action_result, + methods=["GET"], + response_model=None, + ) + self.router.add_api_route("/action", self.get_action_history, methods=["GET"]) + self.router.add_api_route("/config", self.set_config, methods=["POST"]) + self.router.add_api_route( + "/admin/{admin_command}", + self.run_admin_command, + methods=["POST"], + ) + 
self.rest_api.include_router(self.router) + + +if __name__ == "__main__": + RestNode().start_node() diff --git a/madsci/madsci_module/pyproject.toml b/madsci/madsci_module/pyproject.toml index 32469b9..7dbe397 100644 --- a/madsci/madsci_module/pyproject.toml +++ b/madsci/madsci_module/pyproject.toml @@ -1,36 +1,36 @@ - -[project] -name = "madsci.module" -dynamic = ["version"] -description = "The Modular Autonomous Discovery for Science (MADSci) Node Module Helper Classes." -authors = [ - {name = "Tobias Ginsburg", email = "tginsburg@anl.gov"}, - {name = "Ryan D. Lewis", email = "ryan.lewis@anl.gov"}, - {name = "Casey Stone", email = "cstone@anl.gov"}, - {name = "Doga Ozgulbas", email = "dozgulbas@anl.gov"}, -] -requires-python = ">=3.9.1" -readme = "README.md" -license = {text = "MIT"} -dependencies = [ - "madsci.common" -] - -[project.urls] -Homepage = "https://github.com/AD-SDL/MADSci" - - -###################### -# Build Info + Tools # -###################### - -[build-system] -requires = ["pdm-backend"] -build-backend = "pdm.backend" - -[tool.pdm.version] -source = "scm" -fallback_version = "0.0.0" - -#[dependency-groups] -#dev = ["-e madsci-common @ file:///${PROJECT_ROOT}/../madsci_common"] + +[project] +name = "madsci.module" +dynamic = ["version"] +description = "The Modular Autonomous Discovery for Science (MADSci) Node Module Helper Classes." +authors = [ + {name = "Tobias Ginsburg", email = "tginsburg@anl.gov"}, + {name = "Ryan D. 
Lewis", email = "ryan.lewis@anl.gov"}, + {name = "Casey Stone", email = "cstone@anl.gov"}, + {name = "Doga Ozgulbas", email = "dozgulbas@anl.gov"}, +] +requires-python = ">=3.9.1" +readme = "README.md" +license = {text = "MIT"} +dependencies = [ + "madsci.common" +] + +[project.urls] +Homepage = "https://github.com/AD-SDL/MADSci" + + +###################### +# Build Info + Tools # +###################### + +[build-system] +requires = ["pdm-backend"] +build-backend = "pdm.backend" + +[tool.pdm.version] +source = "scm" +fallback_version = "0.0.0" + +#[dependency-groups] +#dev = ["-e madsci-common @ file:///${PROJECT_ROOT}/../madsci_common"] diff --git a/madsci/madsci_resource_manager/madsci/resource_manager/__init__.py b/madsci/madsci_resource_manager/madsci/resource_manager/__init__.py index 67597ac..17ae70f 100644 --- a/madsci/madsci_resource_manager/madsci/resource_manager/__init__.py +++ b/madsci/madsci_resource_manager/madsci/resource_manager/__init__.py @@ -1 +1 @@ -"""MADSci Resource Manager.""" +"""MADSci Resource Manager.""" diff --git a/madsci/madsci_resource_manager/madsci/resource_manager/resource_server.py b/madsci/madsci_resource_manager/madsci/resource_manager/resource_server.py index dddeaf4..6e1b869 100644 --- a/madsci/madsci_resource_manager/madsci/resource_manager/resource_server.py +++ b/madsci/madsci_resource_manager/madsci/resource_manager/resource_server.py @@ -1,33 +1,33 @@ -"""MADSci Resource Manager Server.""" - -from fastapi import FastAPI - -from madsci.resource_manager.types import ( - ResourceManagerConfig, - ResourceManagerDefinition, -) - -app = FastAPI() - -resource_manager_definition = ResourceManagerDefinition( - name="Resource Manager 1", - description="The First MADSci Resource Manager.", - plugin_config=ResourceManagerConfig(), -) -resource_manager_definition.url = f"https://{resource_manager_definition.plugin_config.host}:{resource_manager_definition.plugin_config.port}" - - -@app.get("/info") -def info() -> 
ResourceManagerDefinition: - """Get information about the resource manager.""" - return resource_manager_definition - - -if __name__ == "__main__": - import uvicorn - - uvicorn.run( - app, - host=resource_manager_definition.plugin_config.host, - port=resource_manager_definition.plugin_config.port, - ) +"""MADSci Resource Manager Server.""" + +from fastapi import FastAPI + +from madsci.resource_manager.types import ( + ResourceManagerConfig, + ResourceManagerDefinition, +) + +app = FastAPI() + +resource_manager_definition = ResourceManagerDefinition( + name="Resource Manager 1", + description="The First MADSci Resource Manager.", + plugin_config=ResourceManagerConfig(), +) +resource_manager_definition.url = f"https://{resource_manager_definition.plugin_config.host}:{resource_manager_definition.plugin_config.port}" + + +@app.get("/info") +def info() -> ResourceManagerDefinition: + """Get information about the resource manager.""" + return resource_manager_definition + + +if __name__ == "__main__": + import uvicorn + + uvicorn.run( + app, + host=resource_manager_definition.plugin_config.host, + port=resource_manager_definition.plugin_config.port, + ) diff --git a/madsci/madsci_resource_manager/madsci/resource_manager/types.py b/madsci/madsci_resource_manager/madsci/resource_manager/types.py index d58a584..e9d529c 100644 --- a/madsci/madsci_resource_manager/madsci/resource_manager/types.py +++ b/madsci/madsci_resource_manager/madsci/resource_manager/types.py @@ -1,36 +1,36 @@ -"""MADSci Resource Manager Types.""" - -from sqlmodel.main import Field - -from madsci.common.types.base_types import BaseModel -from madsci.common.types.squid_types import ManagerDefinition - - -class ResourceManagerDefinition(ManagerDefinition): - """Definition for a MADSci Resource Manager.""" - - plugin_type: str = Field( - default="resource_manager", - title="Plugin Type", - description="The type of the plugin, used by other components or plugins to find matching plugins.", - ) - 
plugin_config: "ResourceManagerConfig" = Field( - default_factory=lambda: ResourceManagerConfig(), - title="Plugin Configuration", - description="The configuration for the resource manager plugin.", - ) - - -class ResourceManagerConfig(BaseModel): - """Configuration for a MADSci Resource Manager.""" - - host: str = Field( - default="localhost", - title="Host", - description="The host to run the resource manager on.", - ) - port: int = Field( - default=8012, - title="Port", - description="The port to run the resource manager on.", - ) +"""MADSci Resource Manager Types.""" + +from sqlmodel.main import Field + +from madsci.common.types.base_types import BaseModel +from madsci.common.types.squid_types import ManagerDefinition + + +class ResourceManagerDefinition(ManagerDefinition): + """Definition for a MADSci Resource Manager.""" + + plugin_type: str = Field( + default="resource_manager", + title="Plugin Type", + description="The type of the plugin, used by other components or plugins to find matching plugins.", + ) + plugin_config: "ResourceManagerConfig" = Field( + default_factory=lambda: ResourceManagerConfig(), + title="Plugin Configuration", + description="The configuration for the resource manager plugin.", + ) + + +class ResourceManagerConfig(BaseModel): + """Configuration for a MADSci Resource Manager.""" + + host: str = Field( + default="localhost", + title="Host", + description="The host to run the resource manager on.", + ) + port: int = Field( + default=8012, + title="Port", + description="The port to run the resource manager on.", + ) diff --git a/madsci/madsci_resource_manager/pyproject.toml b/madsci/madsci_resource_manager/pyproject.toml index b977520..c8308d0 100644 --- a/madsci/madsci_resource_manager/pyproject.toml +++ b/madsci/madsci_resource_manager/pyproject.toml @@ -1,35 +1,35 @@ -[project] -name = "madsci.resource_manager" -dynamic = ["version"] -description = "The Modular Autonomous Discovery for Science (MADSci) Resource Manager." 
-authors = [ - {name = "Tobias Ginsburg", email = "tginsburg@anl.gov"}, - {name = "Ryan D. Lewis", email = "ryan.lewis@anl.gov"}, - {name = "Casey Stone", email = "cstone@anl.gov"}, - {name = "Doga Ozgulbas", email = "dozgulbas@anl.gov"}, -] -requires-python = ">=3.9.1" -readme = "README.md" -license = {text = "MIT"} -dependencies = [ - "madsci.common[server]" -] - -[project.urls] -Homepage = "https://github.com/AD-SDL/MADSci" - - -###################### -# Build Info + Tools # -###################### - -[build-system] -requires = ["pdm-backend"] -build-backend = "pdm.backend" - -[tool.pdm.version] -source = "scm" -fallback_version = "0.0.0" - -#[dependency-groups] -#dev = ["-e madsci-common @ file:///${PROJECT_ROOT}/../madsci_common"] +[project] +name = "madsci.resource_manager" +dynamic = ["version"] +description = "The Modular Autonomous Discovery for Science (MADSci) Resource Manager." +authors = [ + {name = "Tobias Ginsburg", email = "tginsburg@anl.gov"}, + {name = "Ryan D. Lewis", email = "ryan.lewis@anl.gov"}, + {name = "Casey Stone", email = "cstone@anl.gov"}, + {name = "Doga Ozgulbas", email = "dozgulbas@anl.gov"}, +] +requires-python = ">=3.9.1" +readme = "README.md" +license = {text = "MIT"} +dependencies = [ + "madsci.common[server]" +] + +[project.urls] +Homepage = "https://github.com/AD-SDL/MADSci" + + +###################### +# Build Info + Tools # +###################### + +[build-system] +requires = ["pdm-backend"] +build-backend = "pdm.backend" + +[tool.pdm.version] +source = "scm" +fallback_version = "0.0.0" + +#[dependency-groups] +#dev = ["-e madsci-common @ file:///${PROJECT_ROOT}/../madsci_common"] diff --git a/madsci/madsci_squid/madsci/squid/__init__.py b/madsci/madsci_squid/madsci/squid/__init__.py index f17a4b5..e26bae9 100644 --- a/madsci/madsci_squid/madsci/squid/__init__.py +++ b/madsci/madsci_squid/madsci/squid/__init__.py @@ -1 +1 @@ -"""The MADSci Squid server and workcell engine.""" +"""The MADSci Squid server and workcell 
engine.""" diff --git a/madsci/madsci_squid/madsci/squid/lab_server.py b/madsci/madsci_squid/madsci/squid/lab_server.py index 6e4c2ce..e594a13 100644 --- a/madsci/madsci_squid/madsci/squid/lab_server.py +++ b/madsci/madsci_squid/madsci/squid/lab_server.py @@ -1,25 +1,25 @@ -"""Squid Lab Server.""" - -import uvicorn -from fastapi import FastAPI -from starlette.responses import JSONResponse - -from madsci.common.definition_loaders import lab_definition_loader -from madsci.common.types.squid_types import LabDefinition - -app = FastAPI() - - -@app.get("/") -async def root() -> JSONResponse: - """Root endpoint.""" - return {"message": "Hello World"} - - -if __name__ == "__main__": - lab_definition = LabDefinition.model_validate(lab_definition_loader()) - uvicorn.run( - app, - host=lab_definition.server_config.host, - port=lab_definition.server_config.port, - ) +"""Squid Lab Server.""" + +import uvicorn +from fastapi import FastAPI +from starlette.responses import JSONResponse + +from madsci.common.definition_loaders import lab_definition_loader +from madsci.common.types.squid_types import LabDefinition + +app = FastAPI() + + +@app.get("/") +async def root() -> JSONResponse: + """Root endpoint.""" + return {"message": "Hello World"} + + +if __name__ == "__main__": + lab_definition = LabDefinition.model_validate(lab_definition_loader()) + uvicorn.run( + app, + host=lab_definition.server_config.host, + port=lab_definition.server_config.port, + ) diff --git a/madsci/madsci_squid/pyproject.toml b/madsci/madsci_squid/pyproject.toml index 9014c59..b5a90c8 100644 --- a/madsci/madsci_squid/pyproject.toml +++ b/madsci/madsci_squid/pyproject.toml @@ -1,35 +1,35 @@ -[project] -name = "madsci.squid" -dynamic = ["version"] -description = "The Modular Autonomous Discovery for Science (MADSci) Control Server and Scheduler, aka Squid." -authors = [ - {name = "Tobias Ginsburg", email = "tginsburg@anl.gov"}, - {name = "Ryan D. 
Lewis", email = "ryan.lewis@anl.gov"}, - {name = "Casey Stone", email = "cstone@anl.gov"}, - {name = "Doga Ozgulbas", email = "dozgulbas@anl.gov"}, -] -requires-python = ">=3.9.1" -readme = "README.md" -license = {text = "MIT"} -dependencies = [ - "madsci.common[server]" -] - -[project.urls] -Homepage = "https://github.com/AD-SDL/MADSci" - - -###################### -# Build Info + Tools # -###################### - -[build-system] -requires = ["pdm-backend"] -build-backend = "pdm.backend" - -[tool.pdm.version] -source = "scm" -fallback_version = "0.0.0" - -#[dependency-groups] -#dev = ["-e madsci-common @ file:///${PROJECT_ROOT}/../madsci_common"] +[project] +name = "madsci.squid" +dynamic = ["version"] +description = "The Modular Autonomous Discovery for Science (MADSci) Control Server and Scheduler, aka Squid." +authors = [ + {name = "Tobias Ginsburg", email = "tginsburg@anl.gov"}, + {name = "Ryan D. Lewis", email = "ryan.lewis@anl.gov"}, + {name = "Casey Stone", email = "cstone@anl.gov"}, + {name = "Doga Ozgulbas", email = "dozgulbas@anl.gov"}, +] +requires-python = ">=3.9.1" +readme = "README.md" +license = {text = "MIT"} +dependencies = [ + "madsci.common[server]" +] + +[project.urls] +Homepage = "https://github.com/AD-SDL/MADSci" + + +###################### +# Build Info + Tools # +###################### + +[build-system] +requires = ["pdm-backend"] +build-backend = "pdm.backend" + +[tool.pdm.version] +source = "scm" +fallback_version = "0.0.0" + +#[dependency-groups] +#dev = ["-e madsci-common @ file:///${PROJECT_ROOT}/../madsci_common"] diff --git a/madsci/madsci_workcell_manager/README.md b/madsci/madsci_workcell_manager/README.md new file mode 100644 index 0000000..e69de29 diff --git a/madsci/madsci_workcell_manager/madsci/tests/workflow_tests.py b/madsci/madsci_workcell_manager/madsci/tests/workflow_tests.py new file mode 100644 index 0000000..64e9fe7 --- /dev/null +++ b/madsci/madsci_workcell_manager/madsci/tests/workflow_tests.py @@ -0,0 +1,19 @@ +from 
"""Smoke test: submit an example workflow to a locally running Workcell Manager."""

import json
from pathlib import Path

import requests

from madsci.common.types.workflow_types import WorkflowDefinition


def test_send_workflow(workflow: str, parameters: dict) -> requests.Response:
    """Send a workflow definition file to the workcell manager's /start_workflow endpoint.

    Args:
        workflow: Path to a workflow definition YAML file.
        parameters: Parameter values to substitute into the workflow.

    Returns:
        The HTTP response from the workcell manager.
    """
    # Don't shadow the `workflow` path argument with the parsed definition.
    workflow_def = WorkflowDefinition.from_yaml(workflow)
    WorkflowDefinition.model_validate(workflow_def)
    url = "http://localhost:8013/start_workflow"  # default workcell manager port
    response = requests.post(
        url,
        data={
            "workflow": workflow_def.model_dump_json(),
            # The server json.loads() this form field, so send a JSON string
            # built from the caller's parameters instead of a hard-coded {}.
            "parameters": json.dumps(parameters),
        },
        files=[],
        timeout=10,
    )
    print(response)
    return response


if __name__ == "__main__":
    # Guarded so importing this module doesn't fire an HTTP request.
    test_send_workflow(
        str(
            Path(
                "../../../../tests/example/workflows/test_workflow.workflow.yaml"
            ).resolve()
        ),
        {},
    )
class WorkcellRedisHandler:
    """Redis-backed state manager for a MADSci workcell.

    Provides transactional access to reading and writing workcell and
    workflow state, with optimistic check-and-set and locking.
    """

    # Last seen value of the redis state-change counter (as a string).
    state_change_marker = "0"
    # Lazily-created shared redis connection; see _redis_client.
    _redis_connection: Any = None

    def __init__(self, workcell_manager_definition: WorkcellManagerDefinition) -> None:
        """Initialize a state handler for the given workcell manager definition."""
        config = workcell_manager_definition.plugin_config
        self._workcell_name = config.workcell_name
        self._redis_host = config.redis_host
        self._redis_port = config.redis_port
        self._redis_password = config.redis_password
        # pottery warns about per-item access patterns on RedisDict; accepted here.
        warnings.filterwarnings("ignore", category=InefficientAccessWarning)

    @property
    def _workcell_prefix(self) -> str:
        """Namespace prefix for every redis key owned by this workcell."""
        return f"workcell:{self._workcell_name}"

    @property
    def _redis_client(self) -> Any:
        """Return a redis.Redis client, creating at most one connection.

        MyPy can't handle Redis object return types for some reason, so no
        type-hinting.
        """
        if self._redis_connection is None:
            self._redis_connection = redis.Redis(
                host=str(self._redis_host),
                port=int(self._redis_port),
                db=0,
                decode_responses=True,
                password=self._redis_password if self._redis_password else None,
            )
        return self._redis_connection

    @property
    def _workcell(self) -> RedisDict:
        """Redis-backed dict holding the active workcell definition."""
        return RedisDict(
            key=f"{self._workcell_prefix}:workcell", redis=self._redis_client
        )

    @property
    def _workflows(self) -> RedisDict:
        """Redis-backed dict mapping run_id -> serialized workflow."""
        return RedisDict(
            key=f"{self._workcell_prefix}:workflow_runs", redis=self._redis_client
        )

    def wc_state_lock(self) -> Redlock:
        """Get a lock on the workcell's state.

        Call before any state updates, or anywhere the state must not change
        underneath us (e.g., in the engine).
        """
        return Redlock(
            key=f"{self._workcell_prefix}:state_lock",
            masters={self._redis_client},
            auto_release_time=60,
        )

    # *Boolean flag helpers (paused / locked / shutdown were referenced by the
    # original draft but never stored anywhere; they are redis-backed here).
    def _get_flag(self, name: str) -> bool:
        """Read a boolean flag stored under the workcell prefix."""
        return self._redis_client.get(f"{self._workcell_prefix}:{name}") == "true"

    def _set_flag(self, name: str, value: bool) -> None:
        """Write a boolean flag stored under the workcell prefix."""
        self._redis_client.set(
            f"{self._workcell_prefix}:{name}", "true" if value else "false"
        )

    @property
    def paused(self) -> bool:
        """Whether the workcell is paused."""
        return self._get_flag("paused")

    @paused.setter
    def paused(self, value: bool) -> None:
        self._set_flag("paused", value)

    @property
    def locked(self) -> bool:
        """Whether the workcell is locked."""
        return self._get_flag("locked")

    @locked.setter
    def locked(self, value: bool) -> None:
        self._set_flag("locked", value)

    @property
    def shutdown(self) -> bool:
        """Whether the workcell is shutting down."""
        return self._get_flag("shutdown")

    @shutdown.setter
    def shutdown(self, value: bool) -> None:
        self._set_flag("shutdown", value)

    # *State Methods
    def get_state(self) -> Dict[str, Any]:
        """Return a dict snapshot of the current workcell state.

        Only state this handler actually tracks is reported; the original
        draft also referenced modules/locations/status attributes that were
        never defined, which raised AttributeError.
        """
        return {
            "error": self.error,
            "workflows": self._workflows.to_dict(),
            "workcell": self._workcell.to_dict(),
            "paused": self.paused,
            "locked": self.locked,
            "shutdown": self.shutdown,
        }

    @property
    def error(self) -> str:
        """Latest error on the server."""
        return self._redis_client.get(f"{self._workcell_prefix}:error")

    @error.setter
    def error(self, value: str) -> None:
        """Set the latest error, marking state changed when the value differs."""
        if self.error != value:
            self.mark_state_changed()
        # Property setters should not return a value.
        self._redis_client.set(f"{self._workcell_prefix}:error", value)

    def clear_state(
        self, reset_locations: bool = True, clear_workflow_runs: bool = False
    ) -> None:
        """Clear the state of the workcell.

        `reset_locations` is accepted for API compatibility; locations are
        not yet tracked by this handler — TODO confirm against the engine.
        """
        if clear_workflow_runs:
            self._workflows.clear()
        self._workcell.clear()
        self.state_change_marker = "0"
        self.paused = False
        self.locked = False
        self.shutdown = False
        self.mark_state_changed()

    def mark_state_changed(self) -> int:
        """Mark the state as changed and return the current change counter."""
        return int(self._redis_client.incr(f"{self._workcell_prefix}:state_changed"))

    def has_state_changed(self) -> bool:
        """Return True if the state changed since this method was last called."""
        state_change_marker = self._redis_client.get(
            f"{self._workcell_prefix}:state_changed"
        )
        if state_change_marker != self.state_change_marker:
            self.state_change_marker = state_change_marker
            return True
        return False

    # *Workcell Methods
    def get_workcell(self) -> WorkcellDefinition:
        """Return the current workcell as a WorkcellDefinition object."""
        return WorkcellDefinition.model_validate(self._workcell.to_dict())

    def set_workcell(self, workcell: WorkcellDefinition) -> None:
        """Set the active workcell."""
        self._workcell.update(**workcell.model_dump(mode="json"))

    def clear_workcell(self) -> None:
        """Empty the workcell definition."""
        self._workcell.clear()

    def get_workcell_id(self) -> str:
        """Return the workcell ID, generating and storing one if absent."""
        wc_id = self._redis_client.get(f"{self._workcell_prefix}:workcell_id")
        if wc_id is None:
            self._redis_client.set(
                f"{self._workcell_prefix}:workcell_id", new_ulid_str()
            )
            wc_id = self._redis_client.get(f"{self._workcell_prefix}:workcell_id")
        return wc_id

    # *Workflow Methods
    def get_workflow(self, run_id: str) -> Workflow:
        """Return a workflow by run ID."""
        return Workflow.model_validate(self._workflows[str(run_id)])

    def get_all_workflows(self) -> Dict[str, Workflow]:
        """Return all workflows that validate, keyed by run ID."""
        valid_workflows = {}
        # Original iterated nonexistent `self._workflow`; iterate _workflows.
        for run_id, workflow in self._workflows.to_dict().items():
            try:
                valid_workflows[str(run_id)] = Workflow.model_validate(workflow)
            except ValidationError:
                # Skip entries that no longer match the current schema.
                continue
        return valid_workflows

    def set_workflow(self, wf: Workflow) -> None:
        """Store a workflow under its run ID and mark the state changed."""
        if isinstance(wf, Workflow):
            wf_dump = wf.model_dump(mode="json")
        else:
            wf_dump = Workflow.model_validate(wf).model_dump(mode="json")
        self._workflows[str(wf_dump["run_id"])] = wf_dump
        self.mark_state_changed()

    def delete_workflow(self, run_id: str) -> None:
        """Delete a workflow by run ID and mark the state changed."""
        del self._workflows[str(run_id)]
        self.mark_state_changed()

    def update_workflow(
        self, run_id: str, func: Callable[..., Any], *args: Any, **kwargs: Any
    ) -> None:
        """Update a workflow by applying `func` to it and storing the result."""
        self.set_workflow(func(self.get_workflow(run_id), *args, **kwargs))
"""Schedulers for the MADSci workcell manager.

Poll workflow state from redis on a fixed interval and advance runs.
"""

import time
from datetime import datetime

from madsci.common.types.event_types import Event
from madsci.common.types.workflow_types import WorkflowStatus
from madsci.common.utils import threaded_task
from madsci.workcell_manager.redis_handler import WorkcellRedisHandler
# Original imported from nonexistent `madsci.workcell_manager.types`.
from madsci.workcell_manager.workcell_manager_types import WorkcellManagerDefinition
from madsci.workcell_manager.workflow_utils import find_step_node, validate_step


def send_event(test: Event) -> None:
    """Placeholder event hook; event reporting is not wired up yet."""
    pass


class Scheduler:
    """Base scheduler: runs `run_iteration` in a background thread on an interval."""

    def __init__(self, workcell_manager_definition: WorkcellManagerDefinition) -> None:
        """Bind the scheduler to one workcell manager's redis-backed state."""
        self.state_handler = WorkcellRedisHandler(workcell_manager_definition)
        self.workcell_manager_definition = workcell_manager_definition
        self.running = True

    def run_iteration(self) -> None:
        """One scheduling pass; subclasses override."""
        pass

    @threaded_task
    def start(self) -> None:
        """Run scheduling iterations until `self.running` is cleared."""
        while self.running:
            self.run_iteration()
            time.sleep(
                self.workcell_manager_definition.plugin_config.scheduler_interval
            )


class DefaultScheduler(Scheduler):
    """Scheduler that promotes new workflows and starts steps that are ready."""

    def run_iteration(self) -> None:
        """Promote NEW workflows to QUEUED and start the current step of ready runs."""
        # Handler exposes get_all_workflows/set_workflow (the original called
        # nonexistent get_all_workflow_runs/set_workflow_run).
        for run_id, wf_run in self.state_handler.get_all_workflows().items():
            if wf_run.status == WorkflowStatus.NEW:
                wf_run.status = WorkflowStatus.QUEUED
                print(
                    f"Processed new workflow: {wf_run.name} with run_id: {run_id}"
                )
                # send_event(WorkflowQueuedEvent.from_wf_run(wf_run=wf_run))
                self.state_handler.set_workflow(wf_run)
            elif wf_run.status in (
                WorkflowStatus.QUEUED,
                WorkflowStatus.IN_PROGRESS,
            ):
                step = wf_run.steps[wf_run.step_index]
                valid, _ = validate_step(step)
                if valid:
                    # Steps reference nodes by `step.node` (see workflow_utils).
                    node = find_step_node(self.state_handler.get_workcell(), step.node)
                    # send_event(WorkflowStartEvent.from_wf_run(wf_run=wf_run))
                    wf_run.status = WorkflowStatus.RUNNING
                    print(
                        f"Starting step {wf_run.name}.{step.name} for run: {run_id}"
                    )
                    if wf_run.step_index == 0:
                        # `import datetime` + datetime.now() was an AttributeError;
                        # use datetime.datetime via the from-import above.
                        wf_run.start_time = datetime.now()
                    self.state_handler.set_workflow(wf_run)
                    # TODO: dispatch `step` to `node` — the original called
                    # run_step(...), which is not defined anywhere yet.
"""MADSci Workcell Manager Server."""

import argparse
import json
import traceback
from contextlib import asynccontextmanager
from typing import Annotated, AsyncGenerator, Optional

from fastapi import FastAPI, Form, HTTPException, UploadFile

from madsci.common.types.workcell_types import WorkcellDefinition
from madsci.common.types.workflow_types import Workflow, WorkflowDefinition
# Absolute import: the bare `from redis_handler import ...` only worked when
# run from inside the package directory.
from madsci.workcell_manager.redis_handler import WorkcellRedisHandler
from madsci.workcell_manager.workcell_manager_types import WorkcellManagerDefinition
from madsci.workcell_manager.workflow_utils import create_workflow, save_workflow_files

arg_parser = argparse.ArgumentParser()
arg_parser.add_argument(
    "--workcell_file",
    type=str,
    default="./workcells/workcell.yaml",
    help="location of the workcell file",
)


@asynccontextmanager  # FastAPI lifespan must be an async context manager.
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:
    """Set up the redis-backed state handler and publish the workcell definition.

    NOTE(review): relies on the module globals `workcell_manager_definition`
    and `workcell` assigned in the __main__ block below — confirm when this
    server grows a non-__main__ entry point.
    """
    app.state.state_handler = WorkcellRedisHandler(workcell_manager_definition)
    app.state.state_handler.set_workcell(workcell)
    yield


app = FastAPI(lifespan=lifespan)


@app.get("/info")
def info() -> WorkcellManagerDefinition:
    """Get the definition of this workcell manager."""
    return workcell_manager_definition


@app.get("/workcell")
def get_workcell() -> WorkcellDefinition:
    """Get the currently-active workcell definition."""
    return app.state.state_handler.get_workcell()


@app.post("/start_workflow")
async def start_run(
    workflow: Annotated[str, Form()],
    experiment_id: Annotated[Optional[str], Form()] = None,
    parameters: Annotated[Optional[str], Form()] = None,
    validate_only: Annotated[Optional[bool], Form()] = False,
    files: Optional[list[UploadFile]] = None,  # avoid mutable default argument
) -> Workflow:
    """Parse the payload and workflow files, then push a workflow onto the redis queue.

    Parameters
    ----------
    workflow: str (form field)
        - The workflow definition as JSON
    parameters: Optional[str] (form field)
        - JSON-encoded dict of dynamic values to insert into the workflow
    experiment_id: Optional[str] (form field)
        - The id of the experiment this workflow is associated with
    validate_only: bool
        - whether to validate the workflow without queueing it
    files: Optional[list[UploadFile]]
        - files referenced by workflow steps

    Returns
    -------
    response: Workflow
        - the created workflow object
    """
    try:
        wf_def = WorkflowDefinition.model_validate_json(workflow)
    except Exception as e:
        traceback.print_exc()
        raise HTTPException(status_code=422, detail=str(e)) from e

    if parameters is None:
        parameters = {}
    else:
        parameters = json.loads(parameters)
    if not isinstance(parameters, dict) or not all(
        isinstance(k, str) for k in parameters
    ):
        raise HTTPException(
            status_code=400, detail="Parameters must be a dictionary with string keys"
        )
    workcell = app.state.state_handler.get_workcell()

    wf = create_workflow(
        workflow_def=wf_def,
        workcell=workcell,
        experiment_id=experiment_id,
        parameters=parameters,
    )

    if not validate_only:
        wf = save_workflow_files(wf=wf, files=files or [])
        with app.state.state_handler.wc_state_lock():
            app.state.state_handler.set_workflow(wf)
    return wf


if __name__ == "__main__":
    import uvicorn

    args = arg_parser.parse_args()
    workcell_file = args.workcell_file
    workcell = WorkcellDefinition.from_yaml(workcell_file)
    workcell_manager_definition = WorkcellManagerDefinition(
        name="Workcell Manager 1",
        description="The First MADSci Workcell Manager.",
        plugin_config=workcell.config,
        manager_type="workcell_manager",
    )
    uvicorn.run(
        app,
        host=workcell_manager_definition.plugin_config.host,
        port=workcell_manager_definition.plugin_config.port,
    )
def find_step_node(workcell: WorkcellDefinition, step_module: str) -> Optional[Node]:
    """Find a node in the workcell by name.

    Parameters
    ----------
    step_module : str
        the name of the node referenced by a workflow step

    Returns
    -------
    node: Node
        The node with full information about the given name

    Raises
    ------
    ValueError
        If no node with that name exists in the workcell.
    """
    for node in workcell.nodes:
        if node.name == step_module:
            return node

    raise ValueError(f"Module {step_module} not in Workcell {workcell.name}")


def validate_node_names(workflow: Workflow, workcell: WorkcellDefinition) -> None:
    """Validate that every node referenced in workflow.flowdef exists in the workcell.

    Raises ValueError (via find_step_node) on the first unknown node name.
    """
    # Plain loop for side effects; the original abused a list comprehension.
    for step in workflow.flowdef:
        find_step_node(workcell, step.node)


def replace_positions(workcell: WorkcellDefinition, step: Step) -> None:
    """Allow the user to put location names instead of positions.

    TODO: not implemented yet; currently a no-op.
    """
    pass


def validate_step(step: Step) -> tuple[bool, str]:
    """Check if a step is valid based on the node's info.

    TODO: always reports valid until node info checks are implemented.
    """
    return (True, "")


def create_workflow(
    workflow_def: WorkflowDefinition,
    workcell: WorkcellDefinition,
    experiment_id: Optional[str] = None,
    parameters: Optional[dict[str, Any]] = None,
    simulate: bool = False,
) -> Workflow:
    """Pull the workcell and build a validated Workflow ready to be executed.

    Parameters
    ----------
    workflow_def: WorkflowDefinition
        The workflow data loaded in from the workflow yaml file

    workcell : WorkcellDefinition
        The workcell to run the workflow on

    experiment_id: Optional[str]
        The id of the experiment this workflow is associated with

    parameters: Optional[dict]
        The input to the workflow

    simulate: bool
        Whether or not to use real robots

    Returns
    -------
    wf: Workflow
        a completely initialized workflow
    """
    validate_node_names(workflow_def, workcell)
    wf_dict = workflow_def.model_dump()
    wf_dict.update(
        {
            "label": workflow_def.name,
            "parameters": parameters,
            "experiment_id": experiment_id,
            "simulate": simulate,
        }
    )
    wf = Workflow(**wf_dict)

    steps = []
    for step in workflow_def.flowdef:
        replace_positions(workcell, step)
        valid, validation_string = validate_step(step)
        if not valid:
            # Only surface validation output on failure (the original printed
            # an empty line for every step).
            print(validation_string)
            raise ValueError(validation_string)
        steps.append(step)

    wf.steps = steps

    return wf


def save_workflow_files(wf: Workflow, files: list[UploadFile]) -> Workflow:
    """Save the files to the workflow run directory, and update the step files
    to point to the new location.

    TODO: not implemented yet; returns the workflow unchanged.
    """
    return wf
- -[metadata] -groups = ["default", "dev"] -strategy = ["inherit_metadata"] -lock_version = "4.5.0" -content_hash = "sha256:dd0d66d317eee266aeb8ee31d1841b1ecd2661adbb474adc9a5f3b304485d3cb" - -[[metadata.targets]] -requires_python = ">=3.9.1" - -[[package]] -name = "aenum" -version = "3.1.15" -summary = "Advanced Enumerations (compatible with Python's stdlib Enum), NamedTuples, and NamedConstants" -groups = ["dev"] -files = [ - {file = "aenum-3.1.15-py3-none-any.whl", hash = "sha256:e0dfaeea4c2bd362144b87377e2c61d91958c5ed0b4daf89cb6f45ae23af6288"}, - {file = "aenum-3.1.15.tar.gz", hash = "sha256:8cbd76cd18c4f870ff39b24284d3ea028fbe8731a58df3aa581e434c575b9559"}, -] - -[[package]] -name = "annotated-types" -version = "0.7.0" -requires_python = ">=3.8" -summary = "Reusable constraint types to use with typing.Annotated" -groups = ["dev"] -dependencies = [ - "typing-extensions>=4.0.0; python_version < \"3.9\"", -] -files = [ - {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, - {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, -] - -[[package]] -name = "certifi" -version = "2024.8.30" -requires_python = ">=3.6" -summary = "Python package for providing Mozilla's CA Bundle." -groups = ["dev"] -files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.4.0" -requires_python = ">=3.7.0" -summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-groups = ["dev"] -files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, - {file = 
"charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = 
"charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", 
hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, -] - -[[package]] -name = "click" -version = "8.1.7" -requires_python = ">=3.7" -summary = "Composable command line interface toolkit" -groups = ["dev"] -dependencies = [ - "colorama; platform_system == \"Windows\"", - "importlib-metadata; python_version < \"3.8\"", -] -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -summary = "Cross-platform colored terminal text." 
-groups = ["dev"] -marker = "platform_system == \"Windows\"" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "greenlet" -version = "3.1.1" -requires_python = ">=3.7" -summary = "Lightweight in-process concurrent programming" -groups = ["dev"] -marker = "(platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"" -files = [ - {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, - {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, - {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, - {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, - {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, - {file = 
"greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, - {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, - {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, - {file = 
"greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, - {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, - {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, - {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, - {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, - {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, -] - -[[package]] -name = "idna" -version = "3.10" -requires_python = ">=3.6" -summary = "Internationalized Domain Names in Applications (IDNA)" -groups = ["dev"] -files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, -] - -[[package]] -name = "linkify-it-py" -version = "2.0.3" -requires_python = ">=3.7" -summary = "Links recognition library with FULL unicode support." 
-groups = ["dev"] -dependencies = [ - "uc-micro-py", -] -files = [ - {file = "linkify-it-py-2.0.3.tar.gz", hash = "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048"}, - {file = "linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79"}, -] - -[[package]] -name = "madsci-client" -version = "0.1.dev8" -requires_python = ">=3.9.1" -editable = true -path = "./madsci/madsci_client" -summary = "The Modular Autonomous Discovery for Science (MADSci) Python Client and CLI." -groups = ["dev"] -dependencies = [ - "click>=8.1.7", - "madsci-common", - "trogon>=0.6.0", -] - -[[package]] -name = "madsci-common" -version = "0.1.dev8" -requires_python = ">=3.9.1" -editable = true -path = "./madsci/madsci_common" -summary = "The Modular Autonomous Discovery for Science (MADSci) Common Definitions and Utilities." -groups = ["dev"] -dependencies = [ - "PyYAML>=6.0.2", - "aenum>=3.1.15", - "pydantic>=2.9.2", - "python-dotenv>=1.0.1", - "python-ulid[pydantic]>=3.0.0", - "requests>=2.32.3", - "sqlmodel>=0.0.22", -] - -[[package]] -name = "madsci-common" -version = "0.1.dev8" -extras = ["server"] -requires_python = ">=3.9.1" -summary = "The Modular Autonomous Discovery for Science (MADSci) Common Definitions and Utilities." -groups = ["dev"] -dependencies = [ - "madsci-common==0.1.dev8", -] - -[[package]] -name = "madsci-module" -version = "0.1.dev8" -requires_python = ">=3.9.1" -editable = true -path = "./madsci/madsci_module" -summary = "The Modular Autonomous Discovery for Science (MADSci) Module Helper Classes and Interfaces." -groups = ["dev"] -dependencies = [ - "madsci-common", -] - -[[package]] -name = "madsci-resource-manager" -version = "0.1.dev8" -requires_python = ">=3.9.1" -editable = true -path = "./madsci/madsci_resource_manager" -summary = "The Modular Autonomous Discovery for Science (MADSci) Resource Manager." 
-groups = ["dev"] -dependencies = [ - "madsci-common[server]", -] - -[[package]] -name = "madsci-squid" -version = "0.1.dev8" -requires_python = ">=3.9.1" -editable = true -path = "./madsci/madsci_squid" -summary = "The Modular Autonomous Discovery for Science (MADSci) Control Server and Scheduler, aka Squid." -groups = ["dev"] -dependencies = [ - "madsci-common[server]", -] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -requires_python = ">=3.8" -summary = "Python port of markdown-it. Markdown parsing, done right!" -groups = ["dev"] -dependencies = [ - "mdurl~=0.1", -] -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -extras = ["linkify", "plugins"] -requires_python = ">=3.8" -summary = "Python port of markdown-it. Markdown parsing, done right!" 
-groups = ["dev"] -dependencies = [ - "linkify-it-py<3,>=1", - "markdown-it-py==3.0.0", - "mdit-py-plugins", -] -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[[package]] -name = "mdit-py-plugins" -version = "0.4.2" -requires_python = ">=3.8" -summary = "Collection of plugins for markdown-it-py" -groups = ["dev"] -dependencies = [ - "markdown-it-py<4.0.0,>=1.0.0", -] -files = [ - {file = "mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}, - {file = "mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}, -] - -[[package]] -name = "mdurl" -version = "0.1.2" -requires_python = ">=3.7" -summary = "Markdown URL utilities" -groups = ["dev"] -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - -[[package]] -name = "platformdirs" -version = "4.3.6" -requires_python = ">=3.8" -summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
-groups = ["dev"] -files = [ - {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, - {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, -] - -[[package]] -name = "pydantic" -version = "2.10.2" -requires_python = ">=3.8" -summary = "Data validation using Python type hints" -groups = ["dev"] -dependencies = [ - "annotated-types>=0.6.0", - "pydantic-core==2.27.1", - "typing-extensions>=4.12.2", -] -files = [ - {file = "pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e"}, - {file = "pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"}, -] - -[[package]] -name = "pydantic-core" -version = "2.27.1" -requires_python = ">=3.8" -summary = "Core functionality for Pydantic validation and serialization" -groups = ["dev"] -dependencies = [ - "typing-extensions!=4.7.0,>=4.6.0", -] -files = [ - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, - {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, - {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, 
- {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, - {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, - {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, - {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, - {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, - {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, - {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, - {file = 
"pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, - {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, - {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, - {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = 
"sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, - {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, - {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, - 
{file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, - {file = 
"pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, - {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, -] - -[[package]] -name = "pygments" -version = "2.18.0" -requires_python = ">=3.8" -summary = "Pygments is a syntax highlighting package written in Python." 
-groups = ["dev"] -files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, -] - -[[package]] -name = "python-dotenv" -version = "1.0.1" -requires_python = ">=3.8" -summary = "Read key-value pairs from a .env file and set them as environment variables" -groups = ["dev"] -files = [ - {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, - {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, -] - -[[package]] -name = "python-ulid" -version = "3.0.0" -requires_python = ">=3.9" -summary = "Universally unique lexicographically sortable identifier" -groups = ["dev"] -files = [ - {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, - {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, -] - -[[package]] -name = "python-ulid" -version = "3.0.0" -extras = ["pydantic"] -requires_python = ">=3.9" -summary = "Universally unique lexicographically sortable identifier" -groups = ["dev"] -dependencies = [ - "pydantic>=2.0", - "python-ulid==3.0.0", -] -files = [ - {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, - {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -requires_python = ">=3.8" -summary = "YAML parser and emitter for Python" -groups = ["dev"] -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = 
"PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = 
"PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, -] - -[[package]] -name = "requests" -version = "2.32.3" -requires_python = ">=3.8" -summary = "Python HTTP for Humans." -groups = ["dev"] -dependencies = [ - "certifi>=2017.4.17", - "charset-normalizer<4,>=2", - "idna<4,>=2.5", - "urllib3<3,>=1.21.1", -] -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[[package]] -name = "rich" -version = "13.9.4" -requires_python = ">=3.8.0" -summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -groups = ["dev"] -dependencies = [ - "markdown-it-py>=2.2.0", - "pygments<3.0.0,>=2.13.0", - "typing-extensions<5.0,>=4.0.0; python_version < \"3.11\"", -] -files = [ - {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, - {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, -] - -[[package]] -name = "sqlalchemy" -version = "2.0.36" 
-requires_python = ">=3.7" -summary = "Database Abstraction Library" -groups = ["dev"] -dependencies = [ - "greenlet!=0.4.17; (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"", - "importlib-metadata; python_version < \"3.8\"", - "typing-extensions>=4.6.0", -] -files = [ - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, - {file = 
"SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, - {file = 
"SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, - {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, - {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, -] - -[[package]] -name = "sqlmodel" -version = "0.0.22" -requires_python = ">=3.7" -summary = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness." 
-groups = ["dev"] -dependencies = [ - "SQLAlchemy<2.1.0,>=2.0.14", - "pydantic<3.0.0,>=1.10.13", -] -files = [ - {file = "sqlmodel-0.0.22-py3-none-any.whl", hash = "sha256:a1ed13e28a1f4057cbf4ff6cdb4fc09e85702621d3259ba17b3c230bfb2f941b"}, - {file = "sqlmodel-0.0.22.tar.gz", hash = "sha256:7d37c882a30c43464d143e35e9ecaf945d88035e20117bf5ec2834a23cbe505e"}, -] - -[[package]] -name = "textual" -version = "0.87.1" -requires_python = "<4.0.0,>=3.8.1" -summary = "Modern Text User Interface framework" -groups = ["dev"] -dependencies = [ - "markdown-it-py[linkify,plugins]>=2.1.0", - "platformdirs<5,>=3.6.0", - "rich>=13.3.3", - "typing-extensions<5.0.0,>=4.4.0", -] -files = [ - {file = "textual-0.87.1-py3-none-any.whl", hash = "sha256:026d1368cd10610a72a9d3de7a56692a17e7e8dffa0468147eb8e186ba0ff0c0"}, - {file = "textual-0.87.1.tar.gz", hash = "sha256:daf4e248ba3d890831ff2617099535eb835863a2e3609c8ce00af0f6d55ed123"}, -] - -[[package]] -name = "trogon" -version = "0.6.0" -requires_python = "<4.0.0,>=3.8.1" -summary = "Automatically generate a Textual TUI for your Click CLI" -groups = ["dev"] -dependencies = [ - "click>=8.0.0", - "textual>=0.61.0", -] -files = [ - {file = "trogon-0.6.0-py3-none-any.whl", hash = "sha256:fb5b6c25acd7a0eaba8d2cd32a57f1d80c26413cea737dad7a4eebcda56060e0"}, - {file = "trogon-0.6.0.tar.gz", hash = "sha256:fd1abfeb7b15d79d6e6cfc9e724aad2a2728812e4713a744d975f133e7ec73a4"}, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -requires_python = ">=3.8" -summary = "Backported and Experimental Type Hints for Python 3.8+" -groups = ["dev"] -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "uc-micro-py" -version = "1.0.3" -requires_python = ">=3.7" -summary = "Micro subset of unicode data 
files for linkify-it-py projects." -groups = ["dev"] -files = [ - {file = "uc-micro-py-1.0.3.tar.gz", hash = "sha256:d321b92cff673ec58027c04015fcaa8bb1e005478643ff4a500882eaab88c48a"}, - {file = "uc_micro_py-1.0.3-py3-none-any.whl", hash = "sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5"}, -] - -[[package]] -name = "urllib3" -version = "2.2.3" -requires_python = ">=3.8" -summary = "HTTP library with thread-safe connection pooling, file post, and more." -groups = ["dev"] -files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, -] +# This file is @generated by PDM. +# It is not intended for manual editing. + +[metadata] +groups = ["default", "dev"] +strategy = ["inherit_metadata"] +lock_version = "4.5.0" +content_hash = "sha256:7197d510f6bfc8c488e5779aad2ebdc75e2e671e49964ebee1ad8f38dcca8455" + +[[metadata.targets]] +requires_python = ">=3.9.1" + +[[package]] +name = "aenum" +version = "3.1.15" +summary = "Advanced Enumerations (compatible with Python's stdlib Enum), NamedTuples, and NamedConstants" +groups = ["dev"] +files = [ + {file = "aenum-3.1.15-py3-none-any.whl", hash = "sha256:e0dfaeea4c2bd362144b87377e2c61d91958c5ed0b4daf89cb6f45ae23af6288"}, + {file = "aenum-3.1.15.tar.gz", hash = "sha256:8cbd76cd18c4f870ff39b24284d3ea028fbe8731a58df3aa581e434c575b9559"}, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +requires_python = ">=3.8" +summary = "Reusable constraint types to use with typing.Annotated" +groups = ["dev"] +dependencies = [ + "typing-extensions>=4.0.0; python_version < \"3.9\"", +] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = 
"sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.6.2.post1" +requires_python = ">=3.9" +summary = "High level compatibility layer for multiple asynchronous event loop implementations" +groups = ["dev"] +dependencies = [ + "exceptiongroup>=1.0.2; python_version < \"3.11\"", + "idna>=2.8", + "sniffio>=1.1", + "typing-extensions>=4.1; python_version < \"3.11\"", +] +files = [ + {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, + {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, +] + +[[package]] +name = "async-timeout" +version = "5.0.1" +requires_python = ">=3.8" +summary = "Timeout context manager for asyncio programs" +groups = ["dev"] +marker = "python_full_version <= \"3.11.2\"" +files = [ + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, +] + +[[package]] +name = "certifi" +version = "2024.8.30" +requires_python = ">=3.6" +summary = "Python package for providing Mozilla's CA Bundle." +groups = ["dev"] +files = [ + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.0" +requires_python = ">=3.7.0" +summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+groups = ["dev"] +files = [ + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = 
"charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = 
"charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", 
hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +] + +[[package]] +name = "click" +version = "8.1.7" +requires_python = ">=3.7" +summary = "Composable command line interface toolkit" +groups = ["dev"] +dependencies = [ + "colorama; platform_system == \"Windows\"", + "importlib-metadata; python_version < \"3.8\"", +] +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +summary = "Cross-platform colored terminal text." 
+groups = ["dev"] +marker = "platform_system == \"Windows\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +requires_python = ">=3.7" +summary = "Backport of PEP 654 (exception groups)" +groups = ["dev"] +marker = "python_version < \"3.11\"" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[[package]] +name = "fastapi" +version = "0.115.6" +requires_python = ">=3.8" +summary = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +groups = ["dev"] +dependencies = [ + "pydantic!=1.8,!=1.8.1,!=2.0.0,!=2.0.1,!=2.1.0,<3.0.0,>=1.7.4", + "starlette<0.42.0,>=0.40.0", + "typing-extensions>=4.8.0", +] +files = [ + {file = "fastapi-0.115.6-py3-none-any.whl", hash = "sha256:e9240b29e36fa8f4bb7290316988e90c381e5092e0cbe84e7818cc3713bcf305"}, + {file = "fastapi-0.115.6.tar.gz", hash = "sha256:9ec46f7addc14ea472958a96aae5b5de65f39721a46aaf5705c480d9a8b76654"}, +] + +[[package]] +name = "greenlet" +version = "3.1.1" +requires_python = ">=3.7" +summary = "Lightweight in-process concurrent programming" +groups = ["dev"] +marker = "(platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"" +files = [ + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + 
{file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = 
"greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + 
{file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = 
"greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, +] + +[[package]] +name = "h11" +version = "0.14.0" +requires_python = ">=3.7" +summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +groups = ["dev"] +dependencies = [ + "typing-extensions; python_version < \"3.8\"", +] +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "idna" +version = "3.10" +requires_python = ">=3.6" +summary = "Internationalized Domain Names in Applications (IDNA)" +groups = ["dev"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[[package]] +name = "linkify-it-py" +version = "2.0.3" +requires_python = ">=3.7" +summary = "Links recognition library with FULL unicode support." 
+groups = ["dev"] +dependencies = [ + "uc-micro-py", +] +files = [ + {file = "linkify-it-py-2.0.3.tar.gz", hash = "sha256:68cda27e162e9215c17d786649d1da0021a451bdc436ef9e0fa0ba5234b9b048"}, + {file = "linkify_it_py-2.0.3-py3-none-any.whl", hash = "sha256:6bcbc417b0ac14323382aef5c5192c0075bf8a9d6b41820a2b66371eac6b6d79"}, +] + +[[package]] +name = "madsci-client" +version = "0.0.0" +requires_python = ">=3.9.1" +editable = true +path = "./madsci/madsci_client" +summary = "The Modular Autonomous Discovery for Science (MADSci) Python Client and CLI." +groups = ["dev"] +dependencies = [ + "click>=8.1.7", + "madsci-common", + "trogon>=0.6.0", +] + +[[package]] +name = "madsci-common" +version = "0.0.0" +requires_python = ">=3.9.1" +editable = true +path = "./madsci/madsci_common" +summary = "The Modular Autonomous Discovery for Science (MADSci) Common Definitions and Utilities." +groups = ["dev"] +dependencies = [ + "PyYAML>=6.0.2", + "aenum>=3.1.15", + "pydantic>=2.9.2", + "python-dotenv>=1.0.1", + "python-ulid[pydantic]>=3.0.0", + "requests>=2.32.3", + "sqlmodel>=0.0.22", +] + +[[package]] +name = "madsci-common" +version = "0.0.0" +extras = ["server"] +requires_python = ">=3.9.1" +summary = "The Modular Autonomous Discovery for Science (MADSci) Common Definitions and Utilities." +groups = ["dev"] +dependencies = [ + "madsci-common==0.0.0", +] + +[[package]] +name = "madsci-module" +version = "0.0.0" +requires_python = ">=3.9.1" +editable = true +path = "./madsci/madsci_module" +summary = "The Modular Autonomous Discovery for Science (MADSci) Node Module Helper Classes." +groups = ["dev"] +dependencies = [ + "madsci-common", +] + +[[package]] +name = "madsci-resource-manager" +version = "0.0.0" +requires_python = ">=3.9.1" +editable = true +path = "./madsci/madsci_resource_manager" +summary = "The Modular Autonomous Discovery for Science (MADSci) Resource Manager." 
+groups = ["dev"] +dependencies = [ + "madsci-common[server]", +] + +[[package]] +name = "madsci-squid" +version = "0.0.0" +requires_python = ">=3.9.1" +editable = true +path = "./madsci/madsci_squid" +summary = "The Modular Autonomous Discovery for Science (MADSci) Control Server and Scheduler, aka Squid." +groups = ["dev"] +dependencies = [ + "madsci-common[server]", +] + +[[package]] +name = "madsci-workcell-manager" +version = "0.0.0" +requires_python = ">=3.9.1" +editable = true +path = "./madsci/madsci_workcell_manager" +summary = "The Modular Autonomous Discovery for Science (MADSci) Workcell Manager." +groups = ["dev"] +dependencies = [ + "fastapi", + "madsci-common[server]", + "pottery", + "python-multipart", + "redis", + "uvicorn", +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +requires_python = ">=3.8" +summary = "Python port of markdown-it. Markdown parsing, done right!" +groups = ["dev"] +dependencies = [ + "mdurl~=0.1", +] +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +extras = ["linkify", "plugins"] +requires_python = ">=3.8" +summary = "Python port of markdown-it. Markdown parsing, done right!" 
+groups = ["dev"] +dependencies = [ + "linkify-it-py<3,>=1", + "markdown-it-py==3.0.0", + "mdit-py-plugins", +] +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.4.2" +requires_python = ">=3.8" +summary = "Collection of plugins for markdown-it-py" +groups = ["dev"] +dependencies = [ + "markdown-it-py<4.0.0,>=1.0.0", +] +files = [ + {file = "mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}, + {file = "mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +requires_python = ">=3.7" +summary = "Markdown URL utilities" +groups = ["dev"] +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + +[[package]] +name = "mmh3" +version = "5.0.1" +requires_python = ">=3.8" +summary = "Python extension for MurmurHash (MurmurHash3), a set of fast and robust hash functions." 
+groups = ["dev"] +files = [ + {file = "mmh3-5.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f0a4b4bf05778ed77d820d6e7d0e9bd6beb0c01af10e1ce9233f5d2f814fcafa"}, + {file = "mmh3-5.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac7a391039aeab95810c2d020b69a94eb6b4b37d4e2374831e92db3a0cdf71c6"}, + {file = "mmh3-5.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3a2583b5521ca49756d8d8bceba80627a9cc295f255dcab4e3df7ccc2f09679a"}, + {file = "mmh3-5.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:081a8423fe53c1ac94f87165f3e4c500125d343410c1a0c5f1703e898a3ef038"}, + {file = "mmh3-5.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8b4d72713799755dc8954a7d36d5c20a6c8de7b233c82404d122c7c7c1707cc"}, + {file = "mmh3-5.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:389a6fd51efc76d3182d36ec306448559c1244f11227d2bb771bdd0e6cc91321"}, + {file = "mmh3-5.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39f4128edaa074bff721b1d31a72508cba4d2887ee7867f22082e1fe9d4edea0"}, + {file = "mmh3-5.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d5d23a94d91aabba3386b3769048d5f4210fdfef80393fece2f34ba5a7b466c"}, + {file = "mmh3-5.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:16347d038361f8b8f24fd2b7ef378c9b68ddee9f7706e46269b6e0d322814713"}, + {file = "mmh3-5.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6e299408565af7d61f2d20a5ffdd77cf2ed902460fe4e6726839d59ba4b72316"}, + {file = "mmh3-5.0.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:42050af21ddfc5445ee5a66e73a8fc758c71790305e3ee9e4a85a8e69e810f94"}, + {file = "mmh3-5.0.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2ae9b1f5ef27ec54659920f0404b7ceb39966e28867c461bfe83a05e8d18ddb0"}, + {file = "mmh3-5.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", 
hash = "sha256:50c2495a02045f3047d71d4ae9cdd7a15efc0bcbb7ff17a18346834a8e2d1d19"}, + {file = "mmh3-5.0.1-cp310-cp310-win32.whl", hash = "sha256:c028fa77cddf351ca13b4a56d43c1775652cde0764cadb39120b68f02a23ecf6"}, + {file = "mmh3-5.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:c5e741e421ec14400c4aae30890515c201f518403bdef29ae1e00d375bb4bbb5"}, + {file = "mmh3-5.0.1-cp310-cp310-win_arm64.whl", hash = "sha256:b17156d56fabc73dbf41bca677ceb6faed435cc8544f6566d72ea77d8a17e9d0"}, + {file = "mmh3-5.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a6d5a9b1b923f1643559ba1fc0bf7a5076c90cbb558878d3bf3641ce458f25d"}, + {file = "mmh3-5.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3349b968be555f7334bbcce839da98f50e1e80b1c615d8e2aa847ea4a964a012"}, + {file = "mmh3-5.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1bd3c94b110e55db02ab9b605029f48a2f7f677c6e58c09d44e42402d438b7e1"}, + {file = "mmh3-5.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ba84d48608f79adbb10bb09986b6dc33eeda5c2d1bd75d00820081b73bde9"}, + {file = "mmh3-5.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c0217987a8b8525c8d9170f66d036dec4ab45cfbd53d47e8d76125791ceb155e"}, + {file = "mmh3-5.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2797063a34e78d1b61639a98b0edec1c856fa86ab80c7ec859f1796d10ba429"}, + {file = "mmh3-5.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8bba16340adcbd47853a2fbe5afdb397549e8f2e79324ff1dced69a3f8afe7c3"}, + {file = "mmh3-5.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:282797957c9f60b51b9d768a602c25f579420cc9af46feb77d457a27823d270a"}, + {file = "mmh3-5.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e4fb670c29e63f954f9e7a2cdcd57b36a854c2538f579ef62681ccbaa1de2b69"}, + {file = 
"mmh3-5.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ee7d85438dc6aff328e19ab052086a3c29e8a9b632998a49e5c4b0034e9e8d6"}, + {file = "mmh3-5.0.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b7fb5db231f3092444bc13901e6a8d299667126b00636ffbad4a7b45e1051e2f"}, + {file = "mmh3-5.0.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c100dd441703da5ec136b1d9003ed4a041d8a1136234c9acd887499796df6ad8"}, + {file = "mmh3-5.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:71f3b765138260fd7a7a2dba0ea5727dabcd18c1f80323c9cfef97a7e86e01d0"}, + {file = "mmh3-5.0.1-cp311-cp311-win32.whl", hash = "sha256:9a76518336247fd17689ce3ae5b16883fd86a490947d46a0193d47fb913e26e3"}, + {file = "mmh3-5.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:336bc4df2e44271f1c302d289cc3d78bd52d3eed8d306c7e4bff8361a12bf148"}, + {file = "mmh3-5.0.1-cp311-cp311-win_arm64.whl", hash = "sha256:af6522722fbbc5999aa66f7244d0986767a46f1fb05accc5200f75b72428a508"}, + {file = "mmh3-5.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f2730bb263ed9c388e8860438b057a53e3cc701134a6ea140f90443c4c11aa40"}, + {file = "mmh3-5.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6246927bc293f6d56724536400b85fb85f5be26101fa77d5f97dd5e2a4c69bf2"}, + {file = "mmh3-5.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fbca322519a6e6e25b6abf43e940e1667cf8ea12510e07fb4919b48a0cd1c411"}, + {file = "mmh3-5.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae8c19903ed8a1724ad9e67e86f15d198a7a1271a4f9be83d47e38f312ed672"}, + {file = "mmh3-5.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a09fd6cc72c07c0c07c3357714234b646d78052487c4a3bd5f7f6e08408cff60"}, + {file = "mmh3-5.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ff8551fee7ae3b11c5d986b6347ade0dccaadd4670ffdb2b944dee120ffcc84"}, + {file = 
"mmh3-5.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e39694c73a5a20c8bf36dfd8676ed351e5234d55751ba4f7562d85449b21ef3f"}, + {file = "mmh3-5.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eba6001989a92f72a89c7cf382fda831678bd780707a66b4f8ca90239fdf2123"}, + {file = "mmh3-5.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0771f90c9911811cc606a5c7b7b58f33501c9ee896ed68a6ac22c7d55878ecc0"}, + {file = "mmh3-5.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:09b31ed0c0c0920363e96641fac4efde65b1ab62b8df86293142f35a254e72b4"}, + {file = "mmh3-5.0.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5cf4a8deda0235312db12075331cb417c4ba163770edfe789bde71d08a24b692"}, + {file = "mmh3-5.0.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:41f7090a95185ef20ac018581a99337f0cbc84a2135171ee3290a9c0d9519585"}, + {file = "mmh3-5.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b97b5b368fb7ff22194ec5854f5b12d8de9ab67a0f304728c7f16e5d12135b76"}, + {file = "mmh3-5.0.1-cp312-cp312-win32.whl", hash = "sha256:842516acf04da546f94fad52db125ee619ccbdcada179da51c326a22c4578cb9"}, + {file = "mmh3-5.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:d963be0dbfd9fca209c17172f6110787ebf78934af25e3694fe2ba40e55c1e2b"}, + {file = "mmh3-5.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:a5da292ceeed8ce8e32b68847261a462d30fd7b478c3f55daae841404f433c15"}, + {file = "mmh3-5.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:673e3f1c8d4231d6fb0271484ee34cb7146a6499fc0df80788adb56fd76842da"}, + {file = "mmh3-5.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f795a306bd16a52ad578b663462cc8e95500b3925d64118ae63453485d67282b"}, + {file = "mmh3-5.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5ed57a5e28e502a1d60436cc25c76c3a5ba57545f250f2969af231dc1221e0a5"}, + {file = 
"mmh3-5.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:632c28e7612e909dbb6cbe2fe496201ada4695b7715584005689c5dc038e59ad"}, + {file = "mmh3-5.0.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53fd6bd525a5985e391c43384672d9d6b317fcb36726447347c7fc75bfed34ec"}, + {file = "mmh3-5.0.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dceacf6b0b961a0e499836af3aa62d60633265607aef551b2a3e3c48cdaa5edd"}, + {file = "mmh3-5.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f0738d478fdfb5d920f6aff5452c78f2c35b0eff72caa2a97dfe38e82f93da2"}, + {file = "mmh3-5.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e70285e7391ab88b872e5bef632bad16b9d99a6d3ca0590656a4753d55988af"}, + {file = "mmh3-5.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:27e5fc6360aa6b828546a4318da1a7da6bf6e5474ccb053c3a6aa8ef19ff97bd"}, + {file = "mmh3-5.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7989530c3c1e2c17bf5a0ec2bba09fd19819078ba90beedabb1c3885f5040b0d"}, + {file = "mmh3-5.0.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:cdad7bee649950da7ecd3cbbbd12fb81f1161072ecbdb5acfa0018338c5cb9cf"}, + {file = "mmh3-5.0.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e143b8f184c1bb58cecd85ab4a4fd6dc65a2d71aee74157392c3fddac2a4a331"}, + {file = "mmh3-5.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5eb12e886f3646dd636f16b76eb23fc0c27e8ff3c1ae73d4391e50ef60b40f6"}, + {file = "mmh3-5.0.1-cp313-cp313-win32.whl", hash = "sha256:16e6dddfa98e1c2d021268e72c78951234186deb4df6630e984ac82df63d0a5d"}, + {file = "mmh3-5.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:d3ffb792d70b8c4a2382af3598dad6ae0c5bd9cee5b7ffcc99aa2f5fd2c1bf70"}, + {file = "mmh3-5.0.1-cp313-cp313-win_arm64.whl", hash = 
"sha256:122fa9ec148383f9124292962bda745f192b47bfd470b2af5fe7bb3982b17896"}, + {file = "mmh3-5.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:48e84cf3cc7e8c41bc07de72299a73b92d9e3cde51d97851420055b1484995f7"}, + {file = "mmh3-5.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6dd9dc28c2d168c49928195c2e29b96f9582a5d07bd690a28aede4cc07b0e696"}, + {file = "mmh3-5.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2771a1c56a3d4bdad990309cff5d0a8051f29c8ec752d001f97d6392194ae880"}, + {file = "mmh3-5.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5ff2a8322ba40951a84411550352fba1073ce1c1d1213bb7530f09aed7f8caf"}, + {file = "mmh3-5.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a16bd3ec90682c9e0a343e6bd4c778c09947c8c5395cdb9e5d9b82b2559efbca"}, + {file = "mmh3-5.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d45733a78d68b5b05ff4a823aea51fa664df1d3bf4929b152ff4fd6dea2dd69b"}, + {file = "mmh3-5.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:904285e83cedebc8873b0838ed54c20f7344120be26e2ca5a907ab007a18a7a0"}, + {file = "mmh3-5.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac4aeb1784e43df728034d0ed72e4b2648db1a69fef48fa58e810e13230ae5ff"}, + {file = "mmh3-5.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:cb3d4f751a0b8b4c8d06ef1c085216c8fddcc8b8c8d72445976b5167a40c6d1e"}, + {file = "mmh3-5.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8021851935600e60c42122ed1176399d7692df338d606195cd599d228a04c1c6"}, + {file = "mmh3-5.0.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6182d5924a5efc451900f864cbb021d7e8ad5d524816ca17304a0f663bc09bb5"}, + {file = "mmh3-5.0.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:5f30b834552a4f79c92e3d266336fb87fd92ce1d36dc6813d3e151035890abbd"}, + {file = 
"mmh3-5.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cd4383f35e915e06d077df27e04ffd3be7513ec6a9de2d31f430393f67e192a7"}, + {file = "mmh3-5.0.1-cp39-cp39-win32.whl", hash = "sha256:1455fb6b42665a97db8fc66e89a861e52b567bce27ed054c47877183f86ea6e3"}, + {file = "mmh3-5.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:9e26a0f4eb9855a143f5938a53592fa14c2d3b25801c2106886ab6c173982780"}, + {file = "mmh3-5.0.1-cp39-cp39-win_arm64.whl", hash = "sha256:0d0a35a69abdad7549c4030a714bb4ad07902edb3bbe61e1bbc403ded5d678be"}, + {file = "mmh3-5.0.1.tar.gz", hash = "sha256:7dab080061aeb31a6069a181f27c473a1f67933854e36a3464931f2716508896"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +requires_python = ">=3.8" +summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +groups = ["dev"] +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[[package]] +name = "pottery" +version = "3.0.0" +requires_python = ">=3.7, <4" +summary = "Redis for Humans." 
+groups = ["dev"] +dependencies = [ + "mmh3", + "redis<5,>=4", + "typing-extensions", +] +files = [ + {file = "pottery-3.0.0-py3-none-any.whl", hash = "sha256:0190323bbb1289d40c5cd683feb04c4b8cff76a6c723f3ded9137c8bcc9fb5f8"}, + {file = "pottery-3.0.0.tar.gz", hash = "sha256:adda303e9357442bcac1d4c7f86aa7deec855e0190c101d09448afbcf5676a74"}, +] + +[[package]] +name = "pydantic" +version = "2.10.2" +requires_python = ">=3.8" +summary = "Data validation using Python type hints" +groups = ["dev"] +dependencies = [ + "annotated-types>=0.6.0", + "pydantic-core==2.27.1", + "typing-extensions>=4.12.2", +] +files = [ + {file = "pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e"}, + {file = "pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"}, +] + +[[package]] +name = "pydantic-core" +version = "2.27.1" +requires_python = ">=3.8" +summary = "Core functionality for Pydantic validation and serialization" +groups = ["dev"] +dependencies = [ + "typing-extensions!=4.7.0,>=4.6.0", +] +files = [ + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, + {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, + {file = 
"pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, + {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, + {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, + {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, + {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, + {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, + {file = 
"pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, + {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, + {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, + {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, + {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, + {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, + {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, + {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, + {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, + {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, + {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, + {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, + {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, + {file = 
"pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, + {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, + {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, + {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, + {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, + {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = 
"sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, + {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, + {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, + {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, + {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = 
"sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, + {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, + {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = 
"sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, + {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, + {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, +] + +[[package]] +name = "pygments" +version = "2.18.0" +requires_python = ">=3.8" +summary = "Pygments is a syntax highlighting package written in Python." 
+groups = ["dev"] +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[[package]] +name = "python-dotenv" +version = "1.0.1" +requires_python = ">=3.8" +summary = "Read key-value pairs from a .env file and set them as environment variables" +groups = ["dev"] +files = [ + {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, + {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, +] + +[[package]] +name = "python-multipart" +version = "0.0.19" +requires_python = ">=3.8" +summary = "A streaming multipart parser for Python" +groups = ["dev"] +files = [ + {file = "python_multipart-0.0.19-py3-none-any.whl", hash = "sha256:f8d5b0b9c618575bf9df01c684ded1d94a338839bdd8223838afacfb4bb2082d"}, + {file = "python_multipart-0.0.19.tar.gz", hash = "sha256:905502ef39050557b7a6af411f454bc19526529ca46ae6831508438890ce12cc"}, +] + +[[package]] +name = "python-ulid" +version = "3.0.0" +requires_python = ">=3.9" +summary = "Universally unique lexicographically sortable identifier" +groups = ["dev"] +files = [ + {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, + {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, +] + +[[package]] +name = "python-ulid" +version = "3.0.0" +extras = ["pydantic"] +requires_python = ">=3.9" +summary = "Universally unique lexicographically sortable identifier" +groups = ["dev"] +dependencies = [ + "pydantic>=2.0", + "python-ulid==3.0.0", +] +files = [ + {file = "python_ulid-3.0.0-py3-none-any.whl", hash = 
"sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, + {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +requires_python = ">=3.8" +summary = "YAML parser and emitter for Python" +groups = ["dev"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file 
= "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + 
{file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "redis" +version = "4.6.0" +requires_python = ">=3.7" +summary = "Python client for Redis database and key-value store" +groups = ["dev"] +dependencies = [ + "async-timeout>=4.0.2; python_full_version <= \"3.11.2\"", + "importlib-metadata>=1.0; python_version < \"3.8\"", + "typing-extensions; python_version < \"3.8\"", +] +files = [ + {file = "redis-4.6.0-py3-none-any.whl", hash = "sha256:e2b03db868160ee4591de3cb90d40ebb50a90dd302138775937f6a42b7ed183c"}, + {file = "redis-4.6.0.tar.gz", hash = "sha256:585dc516b9eb042a619ef0a39c3d7d55fe81bdb4df09a52c9cdde0d07bf1aa7d"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +requires_python = ">=3.8" +summary = "Python HTTP for Humans." 
+groups = ["dev"] +dependencies = [ + "certifi>=2017.4.17", + "charset-normalizer<4,>=2", + "idna<4,>=2.5", + "urllib3<3,>=1.21.1", +] +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[[package]] +name = "rich" +version = "13.9.4" +requires_python = ">=3.8.0" +summary = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +groups = ["dev"] +dependencies = [ + "markdown-it-py>=2.2.0", + "pygments<3.0.0,>=2.13.0", + "typing-extensions<5.0,>=4.0.0; python_version < \"3.11\"", +] +files = [ + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +requires_python = ">=3.7" +summary = "Sniff out which async library your code is running under" +groups = ["dev"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.36" +requires_python = ">=3.7" +summary = "Database Abstraction Library" +groups = ["dev"] +dependencies = [ + "greenlet!=0.4.17; (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"", + "importlib-metadata; python_version < \"3.8\"", + "typing-extensions>=4.6.0", +] +files = [ + {file = 
"SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, + {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, + {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, + {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, + {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = 
"sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, + {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, + {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, + {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, +] + +[[package]] +name = "sqlmodel" +version = "0.0.22" +requires_python = ">=3.7" +summary = "SQLModel, SQL databases in Python, designed for simplicity, compatibility, and robustness." +groups = ["dev"] +dependencies = [ + "SQLAlchemy<2.1.0,>=2.0.14", + "pydantic<3.0.0,>=1.10.13", +] +files = [ + {file = "sqlmodel-0.0.22-py3-none-any.whl", hash = "sha256:a1ed13e28a1f4057cbf4ff6cdb4fc09e85702621d3259ba17b3c230bfb2f941b"}, + {file = "sqlmodel-0.0.22.tar.gz", hash = "sha256:7d37c882a30c43464d143e35e9ecaf945d88035e20117bf5ec2834a23cbe505e"}, +] + +[[package]] +name = "starlette" +version = "0.41.3" +requires_python = ">=3.8" +summary = "The little ASGI library that shines." 
+groups = ["dev"] +dependencies = [ + "anyio<5,>=3.4.0", + "typing-extensions>=3.10.0; python_version < \"3.10\"", +] +files = [ + {file = "starlette-0.41.3-py3-none-any.whl", hash = "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7"}, + {file = "starlette-0.41.3.tar.gz", hash = "sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835"}, +] + +[[package]] +name = "textual" +version = "0.87.1" +requires_python = "<4.0.0,>=3.8.1" +summary = "Modern Text User Interface framework" +groups = ["dev"] +dependencies = [ + "markdown-it-py[linkify,plugins]>=2.1.0", + "platformdirs<5,>=3.6.0", + "rich>=13.3.3", + "typing-extensions<5.0.0,>=4.4.0", +] +files = [ + {file = "textual-0.87.1-py3-none-any.whl", hash = "sha256:026d1368cd10610a72a9d3de7a56692a17e7e8dffa0468147eb8e186ba0ff0c0"}, + {file = "textual-0.87.1.tar.gz", hash = "sha256:daf4e248ba3d890831ff2617099535eb835863a2e3609c8ce00af0f6d55ed123"}, +] + +[[package]] +name = "trogon" +version = "0.6.0" +requires_python = "<4.0.0,>=3.8.1" +summary = "Automatically generate a Textual TUI for your Click CLI" +groups = ["dev"] +dependencies = [ + "click>=8.0.0", + "textual>=0.61.0", +] +files = [ + {file = "trogon-0.6.0-py3-none-any.whl", hash = "sha256:fb5b6c25acd7a0eaba8d2cd32a57f1d80c26413cea737dad7a4eebcda56060e0"}, + {file = "trogon-0.6.0.tar.gz", hash = "sha256:fd1abfeb7b15d79d6e6cfc9e724aad2a2728812e4713a744d975f133e7ec73a4"}, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +requires_python = ">=3.8" +summary = "Backported and Experimental Type Hints for Python 3.8+" +groups = ["dev"] +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] + +[[package]] +name = "uc-micro-py" +version = "1.0.3" +requires_python = ">=3.7" +summary = "Micro 
subset of unicode data files for linkify-it-py projects." +groups = ["dev"] +files = [ + {file = "uc-micro-py-1.0.3.tar.gz", hash = "sha256:d321b92cff673ec58027c04015fcaa8bb1e005478643ff4a500882eaab88c48a"}, + {file = "uc_micro_py-1.0.3-py3-none-any.whl", hash = "sha256:db1dffff340817673d7b466ec86114a9dc0e9d4d9b5ba229d9d60e5c12600cd5"}, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +requires_python = ">=3.8" +summary = "HTTP library with thread-safe connection pooling, file post, and more." +groups = ["dev"] +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[[package]] +name = "uvicorn" +version = "0.32.1" +requires_python = ">=3.8" +summary = "The lightning-fast ASGI server." +groups = ["dev"] +dependencies = [ + "click>=7.0", + "h11>=0.8", + "typing-extensions>=4.0; python_version < \"3.11\"", +] +files = [ + {file = "uvicorn-0.32.1-py3-none-any.whl", hash = "sha256:82ad92fd58da0d12af7482ecdb5f2470a04c9c9a53ced65b9bbb4a205377602e"}, + {file = "uvicorn-0.32.1.tar.gz", hash = "sha256:ee9519c246a72b1c084cea8d3b44ed6026e78a4a309cbedae9c37e4cb9fbb175"}, +] diff --git a/pyproject.toml b/pyproject.toml index c99d363..d12804e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,17 +1,18 @@ -[project] -name = "madsci" -dependencies = [] -requires-python = ">=3.9.1" -dynamic = ["version"] - -[tool.pdm.dev-dependencies] -dev = [ - "-e madsci.common @ file:///${PROJECT_ROOT}/madsci/madsci_common", - "-e madsci.squid @ file:///${PROJECT_ROOT}/madsci/madsci_squid", - "-e madsci.client @ file:///${PROJECT_ROOT}/madsci/madsci_client", - "-e madsci.module @ file:///${PROJECT_ROOT}/madsci/madsci_module", - "-e madsci.resource_manager @ file:///${PROJECT_ROOT}/madsci/madsci_resource_manager", -] -[tool.pdm.version] -source = "scm" -fallback_version = "0.0.0" 
+[project] +name = "madsci" +dependencies = [] +requires-python = ">=3.9.1" +dynamic = ["version"] + +[tool.pdm.dev-dependencies] +dev = [ + "-e madsci.common @ file:///${PROJECT_ROOT}/madsci/madsci_common", + "-e madsci.squid @ file:///${PROJECT_ROOT}/madsci/madsci_squid", + "-e madsci.client @ file:///${PROJECT_ROOT}/madsci/madsci_client", + "-e madsci.module @ file:///${PROJECT_ROOT}/madsci/madsci_module", + "-e madsci.resource_manager @ file:///${PROJECT_ROOT}/madsci/madsci_resource_manager", + "-e madsci.workcell_manager @ file:///${PROJECT_ROOT}/madsci/madsci_workcell_manager", +] +[tool.pdm.version] +source = "scm" +fallback_version = "0.0.0" diff --git a/ruff.toml b/ruff.toml index 53bf857..76e5ff9 100644 --- a/ruff.toml +++ b/ruff.toml @@ -1,118 +1,118 @@ -# https://docs.astral.sh/ruff/configuration/ - -# Exclude a variety of commonly ignored directories. -exclude = [ - ".bzr", - ".direnv", - ".eggs", - ".git", - ".git-rewrite", - ".hg", - ".mypy_cache", - ".nox", - ".pants.d", - ".pytype", - ".ruff_cache", - ".svn", - ".tox", - ".venv", - "__pypackages__", - "_build", - "buck-out", - "build", - "dist", - "node_modules", - "venv", - ".venv", - "docs", -] - -# Same as Black. -line-length = 88 -indent-width = 4 - -# Assume Python 3.9 -target-version = "py39" - -[lint] -# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default. 
-select = [ - # pycodestyle - "E", - # Pyflakes - "F", - # pyupgrade - "UP", - # flake8-bugbear - "B", - # flake8-simplify - "SIM", - # isort - "I", - # Warning - "W", - # pydocstyle - "D100", "D101", "D102", "D103", "D104", "D105", "D106", "D107", - # ruff - "RUF", - # McCabe Complexity - "C90", - # pep8-naming (N) - "N", - # flake8-2020 (YTT) - "YTT", - # flake8-annotations (ANN) - "ANN", - # flake8-async - "ASYNC", - # flake8-bandit - "S", - # flake8-commas - "COM", - # flake8-use-pathlib - "PTH", - # FastAPI - "FAST", - # flake8-logging - "LOG", - # flake8-comprehensions - "C4", - # flake8-pie - "PIE", - # flake8-return - "RET", - # Pylint - "PL", - # eradicate - "ERA", - # refurb - "FURB", - # flake-8-unused-arguments - "ARG", -] -ignore = [ - "E501", # Line too long - "B006", # Do not use mutable data structures for argument defaults - "ANN401", # Don't flag Any types - "COM812", # Don't fight over trailing commas -] - -# Allow fix for all enabled rules (when `--fix`) is provided. -fixable = ["ALL"] -unfixable = [] - -# Allow unused variables when underscore-prefixed. -dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" - -[format] -# Like Black, use double quotes for strings. -quote-style = "double" - -# Like Black, indent with spaces, rather than tabs. -indent-style = "space" - -# Like Black, respect magic trailing commas. -skip-magic-trailing-comma = false - -# Like Black, automatically detect the appropriate line ending. -line-ending = "auto" +# https://docs.astral.sh/ruff/configuration/ + +# Exclude a variety of commonly ignored directories. +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".mypy_cache", + ".nox", + ".pants.d", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "venv", + ".venv", + "docs", +] + +# Same as Black. 
+line-length = 88 +indent-width = 4 + +# Assume Python 3.9 +target-version = "py39" + +[lint] +# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default. +select = [ + # pycodestyle + "E", + # Pyflakes + "F", + # pyupgrade + "UP", + # flake8-bugbear + "B", + # flake8-simplify + "SIM", + # isort + "I", + # Warning + "W", + # pydocstyle + "D100", "D101", "D102", "D103", "D104", "D105", "D106", "D107", + # ruff + "RUF", + # McCabe Complexity + "C90", + # pep8-naming (N) + "N", + # flake8-2020 (YTT) + "YTT", + # flake8-annotations (ANN) + "ANN", + # flake8-async + "ASYNC", + # flake8-bandit + "S", + # flake8-commas + "COM", + # flake8-use-pathlib + "PTH", + # FastAPI + "FAST", + # flake8-logging + "LOG", + # flake8-comprehensions + "C4", + # flake8-pie + "PIE", + # flake8-return + "RET", + # Pylint + "PL", + # eradicate + "ERA", + # refurb + "FURB", + # flake-8-unused-arguments + "ARG", +] +ignore = [ + "E501", # Line too long + "B006", # Do not use mutable data structures for argument defaults + "ANN401", # Don't flag Any types + "COM812", # Don't fight over trailing commas +] + +# Allow fix for all enabled rules (when `--fix`) is provided. +fixable = ["ALL"] +unfixable = [] + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +[format] +# Like Black, use double quotes for strings. +quote-style = "double" + +# Like Black, indent with spaces, rather than tabs. +indent-style = "space" + +# Like Black, respect magic trailing commas. +skip-magic-trailing-comma = false + +# Like Black, automatically detect the appropriate line ending. 
+line-ending = "auto" diff --git a/tests/.gitignore b/tests/.gitignore index 811a0c2..53d7881 100644 --- a/tests/.gitignore +++ b/tests/.gitignore @@ -1 +1 @@ -test_lab/ +test_lab/ diff --git a/tests/example/example_lab.lab.yaml b/tests/example/example_lab.lab.yaml index c4c23c3..9a916f2 100644 --- a/tests/example/example_lab.lab.yaml +++ b/tests/example/example_lab.lab.yaml @@ -1,11 +1,11 @@ -name: ExampleLab -lab_id: 01JDN5Q2T3BJQZQF17AA0VTT15 -description: An example for Doga -server_config: - host: 127.0.0.1 - port: 8000 -workcells: - TestWorkcell: workcells/test_workcell.workcell.yaml -commands: - helloworld: echo "hello world" -managers: {} +name: ExampleLab +lab_id: 01JDN5Q2T3BJQZQF17AA0VTT15 +description: An example for Doga +server_config: + host: 127.0.0.1 + port: 8000 +workcells: + TestWorkcell: workcells/test_workcell.workcell.yaml +commands: + helloworld: echo "hello world" +managers: {} diff --git a/tests/example/workcells/test_workcell.workcell.yaml b/tests/example/workcells/test_workcell.workcell.yaml index d033ee3..975fd61 100644 --- a/tests/example/workcells/test_workcell.workcell.yaml +++ b/tests/example/workcells/test_workcell.workcell.yaml @@ -1,8 +1,9 @@ -name: TestWorkcell -workcell_id: 01JDN5Z858MNVYG7MJW5D6Z94F -description: asldfkj -config: - scheduler_update_interval: 0.1 - node_update_interval: 1.0 - auto_start: true -nodes: {} +name: TestWorkcell +workcell_id: 01JDN5Z858MNVYG7MJW5D6Z94F +description: asldfkj +config: + scheduler_update_interval: 0.1 + node_update_interval: 1.0 + auto_start: true + +nodes: {} diff --git a/tests/example/workflows/test_workflow.workflow.yaml b/tests/example/workflows/test_workflow.workflow.yaml new file mode 100644 index 0000000..b473a2d --- /dev/null +++ b/tests/example/workflows/test_workflow.workflow.yaml @@ -0,0 +1,18 @@ +name: Test_Workflow +metadata: + author: Tobias Ginsburg, Kyle Hippe, Ryan D. 
Lewis + info: Example workflow for WEI + version: 0.3 +parameters: + - name: delay + default: 1.5 + - name: pos + - name: aim +flowdef: + - name: Get plate to $pos + node: transfer + action: transfer + args: + target: thingy + comment: Get a new plate + diff --git a/tests/example_resources.yaml b/tests/example_resources.yaml index a819e38..fb1a3d3 100644 --- a/tests/example_resources.yaml +++ b/tests/example_resources.yaml @@ -1,110 +1,110 @@ -resource_types: - # * Consumables - - type_name: liquid - type_description: A liquid in a well. - base_type: consumable - - type_name: powder - type_description: A powder in a well. - base_type: consumable - - type_name: water - type_description: A consumable resource type for water. - base_type: consumable - parent_types: liquid # * Can be used in wells because it is a liquid - - # * Plates - - type_name: plate - type_description: Any ANSI/SLAS standard microplate. - base_type: asset - - type_name: plate_96_well_corningware - type_description: A 96 well ANSI/SLAS standard microplate made by corningware. - base_type: grid - parent_types: - - plate - default_child_type: well - default_child_template: - - resource_type: well - resource_name: ${self.row}${self.column}} #* i.e. A1 - resource_description: Well ${self.key} in plate ${self.parent.resource_name}. - resizeable: false - rows: [A, B, C, D, E, F, G, H] #* Explicitly defined row indices - columns: 12 # automatically converted to range 1-12 - supported_child_types: - - well - - type_name: well - type_description: A single well in a 96 well plate. - base_type: pool - default_capacity: null # *no limit to number of items (i.e. distinct liquids) in the well - resizeable: false - supported_child_types: - - liquid - - powder - - # * Containers - - type_name: nest - type_description: A nest for plates. - base_type: container - default_capacity: 1 - resizeable: false - supported_child_types: - - plate - - type_name: plate_stack - type_description: A stack of 96 well plates. 
- base_type: stack - default_capacity: 10 - resizeable: true #* Plate stacks can be many different sizes - default_child_type: plate_96_well_corningware - supported_child_types: - - plate - - type_name: trash_conveyor - type_description: A trash conveyor. - base_type: queue - default_capacity: 10 - resizeable: true #* allow any size trash conveyor - supported_child_types: - - resource - - type_name: incubator - type_description: A simple incubator with 10 nests. - base_type: collection - default_capacity: 10 - resizeable: true #* allow any size incubator - supported_child_types: - - plate - keys: [A, B, C, D, E, F, G, H] #* Explicitly defined keys - - - -default_resources: - - resource_name: plate_stack_1 - resource_type: plate_stack - resource_description: A 10-plate stack for ANSI/SLAS standard microplates. - parent: null #* default to the owning module or workcell - default_children: #* Explicitly defined children - - resource_type: plate_96_well_corningware - resource_name: plate_96_well_corningware_1 - resource_description: A 96 well ANSI/SLAS standard microplate made by corningware. - # parent: plate_stack_1 #* default to the parent resource, no need to specify - - resource_name: trash_conveyor_1 - resource_type: trash_conveyor - resource_description: A trash conveyor. - parent: null #* default to the owning module or workcell - capacity: 5 #* override the default capacity - - resource_name: incubator_1 - resource_type: incubator - resource_description: A simple incubator with 10 wells. - parent: null #* default to the owning module or workcell - capacity: 20 #* override the default capacity - keys: 20 #* override the default keys - default_children: - - resource_type: plate_96_well_corningware - resource_name: plate_96_well_corningware_2 - resource_description: A 96 well ANSI/SLAS standard microplate made by corningware. 
- # parent: incubator_1 #* default to the parent resource, no need to specify - - resource_name: test_unknown_resource_type - resource_type: unknown_resource_type #TODO: support resource types defined by other sources - resource_description: A test resource with an unknown resource type. - parent: null #* default to the owning module or workcell - default_children: - - resource_type: liquid - resource_name: liquid_1 - resource_description: A test liquid. - parent: test_unknown_resource_type +resource_types: + # * Consumables + - type_name: liquid + type_description: A liquid in a well. + base_type: consumable + - type_name: powder + type_description: A powder in a well. + base_type: consumable + - type_name: water + type_description: A consumable resource type for water. + base_type: consumable + parent_types: liquid # * Can be used in wells because it is a liquid + + # * Plates + - type_name: plate + type_description: Any ANSI/SLAS standard microplate. + base_type: asset + - type_name: plate_96_well_corningware + type_description: A 96 well ANSI/SLAS standard microplate made by corningware. + base_type: grid + parent_types: + - plate + default_child_type: well + default_child_template: + - resource_type: well + resource_name: ${self.row}${self.column}} #* i.e. A1 + resource_description: Well ${self.key} in plate ${self.parent.resource_name}. + resizeable: false + rows: [A, B, C, D, E, F, G, H] #* Explicitly defined row indices + columns: 12 # automatically converted to range 1-12 + supported_child_types: + - well + - type_name: well + type_description: A single well in a 96 well plate. + base_type: pool + default_capacity: null # *no limit to number of items (i.e. distinct liquids) in the well + resizeable: false + supported_child_types: + - liquid + - powder + + # * Containers + - type_name: nest + type_description: A nest for plates. 
+ base_type: container + default_capacity: 1 + resizeable: false + supported_child_types: + - plate + - type_name: plate_stack + type_description: A stack of 96 well plates. + base_type: stack + default_capacity: 10 + resizeable: true #* Plate stacks can be many different sizes + default_child_type: plate_96_well_corningware + supported_child_types: + - plate + - type_name: trash_conveyor + type_description: A trash conveyor. + base_type: queue + default_capacity: 10 + resizeable: true #* allow any size trash conveyor + supported_child_types: + - resource + - type_name: incubator + type_description: A simple incubator with 10 nests. + base_type: collection + default_capacity: 10 + resizeable: true #* allow any size incubator + supported_child_types: + - plate + keys: [A, B, C, D, E, F, G, H] #* Explicitly defined keys + + + +default_resources: + - resource_name: plate_stack_1 + resource_type: plate_stack + resource_description: A 10-plate stack for ANSI/SLAS standard microplates. + parent: null #* default to the owning module or workcell + default_children: #* Explicitly defined children + - resource_type: plate_96_well_corningware + resource_name: plate_96_well_corningware_1 + resource_description: A 96 well ANSI/SLAS standard microplate made by corningware. + # parent: plate_stack_1 #* default to the parent resource, no need to specify + - resource_name: trash_conveyor_1 + resource_type: trash_conveyor + resource_description: A trash conveyor. + parent: null #* default to the owning module or workcell + capacity: 5 #* override the default capacity + - resource_name: incubator_1 + resource_type: incubator + resource_description: A simple incubator with 10 wells. 
+ parent: null #* default to the owning module or workcell + capacity: 20 #* override the default capacity + keys: 20 #* override the default keys + default_children: + - resource_type: plate_96_well_corningware + resource_name: plate_96_well_corningware_2 + resource_description: A 96 well ANSI/SLAS standard microplate made by corningware. + # parent: incubator_1 #* default to the parent resource, no need to specify + - resource_name: test_unknown_resource_type + resource_type: unknown_resource_type #TODO: support resource types defined by other sources + resource_description: A test resource with an unknown resource type. + parent: null #* default to the owning module or workcell + default_children: + - resource_type: liquid + resource_name: liquid_1 + resource_description: A test liquid. + parent: test_unknown_resource_type diff --git a/tests/test_cli.ipynb b/tests/test_cli.ipynb index 1618942..898134c 100644 --- a/tests/test_cli.ipynb +++ b/tests/test_cli.ipynb @@ -1,144 +1,144 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "from pathlib import Path\n", - "\n", - "if Path.cwd().stem == \"test_lab\":\n", - " os.chdir(\"..\")\n", - "\n", - "path = Path.cwd() / \"test_lab\"\n", - "if path.exists():\n", - " print(\"Directory test_lab/ already exists, removing...\")\n", - "\n", - " def remove_children(path: Path) -> None:\n", - " \"\"\"Recursively remove all children of a directory.\"\"\"\n", - " for child in path.iterdir():\n", - " if child.is_file():\n", - " child.unlink()\n", - " else:\n", - " remove_children(child)\n", - " child.rmdir()\n", - "\n", - " remove_children(path)\n", - " if path.is_dir():\n", - " path.rmdir()\n", - "\n", - "path.mkdir()\n", - "os.chdir(path)\n", - "print(Path.cwd())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Test creating a lab\n", - "!madsci -q lab create --name 
\"test_lab\"\n", - "!madsci -q lab create --name \"test_lab_2\" --description \"A test lab with a description\"\n", - "!madsci -q lab list\n", - "!madsci -q lab validate\n", - "!madsci -q lab info\n", - "!madsci -q lab --name test_lab_2 delete\n", - "!madsci -q lab list\n", - "!madsci -q lab add-command --command_name \"echo\" --command \"echo 'Hello, world!'\"\n", - "!madsci -q lab run echo\n", - "!madsci -q lab delete-command echo\n", - "!madsci -q lab info" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!madsci -q workcell create --name \"test_workcell\"\n", - "!madsci -q workcell create --name \"test_workcell_2\" --description \"A test workcell with a description\"\n", - "!madsci -q lab info\n", - "!madsci -q workcell list\n", - "!madsci -q workcell info\n", - "!madsci -q workcell validate\n", - "!madsci -q workcell delete\n", - "!madsci -q workcell list\n", - "!madsci -q lab info" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!madsci -q module create --name \"test_module\"\n", - "!madsci -q module create --name \"test_module_2\" --description \"A test module with a description\"\n", - "!madsci -q lab info\n", - "!madsci -q module list\n", - "!madsci -q module info\n", - "!madsci -q module delete\n", - "!madsci -q module list\n", - "!madsci -q lab info\n", - "!madsci -q module add-command --command_name \"echo\" --command \"echo 'Hello, world!'\"\n", - "!madsci -q module run echo\n", - "!madsci -q module delete-command --command_name echo\n", - "!madsci -q module info" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# *Test CLI node commands\n", - "!madsci -q node create --name \"test_node\"\n", - "!madsci -q node create --name \"test_node_2\" --description \"A test node with a description\"\n", - "!madsci -q workcell info\n", - "!madsci -q node list\n", - 
"!madsci -q node info\n", - "!madsci -q node delete\n", - "!madsci -q node list\n", - "!madsci -q workcell info\n", - "!madsci -q node add-command --command_name \"echo\" --command \"echo 'Hello, world!'\"\n", - "!madsci -q node run echo\n", - "!madsci -q node delete-command echo\n", - "!madsci -q node info" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": ".venv", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.2" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "from pathlib import Path\n", + "\n", + "if Path.cwd().stem == \"test_lab\":\n", + " os.chdir(\"..\")\n", + "\n", + "path = Path.cwd() / \"test_lab\"\n", + "if path.exists():\n", + " print(\"Directory test_lab/ already exists, removing...\")\n", + "\n", + " def remove_children(path: Path) -> None:\n", + " \"\"\"Recursively remove all children of a directory.\"\"\"\n", + " for child in path.iterdir():\n", + " if child.is_file():\n", + " child.unlink()\n", + " else:\n", + " remove_children(child)\n", + " child.rmdir()\n", + "\n", + " remove_children(path)\n", + " if path.is_dir():\n", + " path.rmdir()\n", + "\n", + "path.mkdir()\n", + "os.chdir(path)\n", + "print(Path.cwd())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Test creating a lab\n", + "!madsci -q lab create --name \"test_lab\"\n", + "!madsci -q lab create --name \"test_lab_2\" --description \"A test lab with a description\"\n", + "!madsci -q lab list\n", + "!madsci -q lab 
validate\n", + "!madsci -q lab info\n", + "!madsci -q lab --name test_lab_2 delete\n", + "!madsci -q lab list\n", + "!madsci -q lab add-command --command_name \"echo\" --command \"echo 'Hello, world!'\"\n", + "!madsci -q lab run echo\n", + "!madsci -q lab delete-command echo\n", + "!madsci -q lab info" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!madsci -q workcell create --name \"test_workcell\"\n", + "!madsci -q workcell create --name \"test_workcell_2\" --description \"A test workcell with a description\"\n", + "!madsci -q lab info\n", + "!madsci -q workcell list\n", + "!madsci -q workcell info\n", + "!madsci -q workcell validate\n", + "!madsci -q workcell delete\n", + "!madsci -q workcell list\n", + "!madsci -q lab info" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!madsci -q module create --name \"test_module\"\n", + "!madsci -q module create --name \"test_module_2\" --description \"A test module with a description\"\n", + "!madsci -q lab info\n", + "!madsci -q module list\n", + "!madsci -q module info\n", + "!madsci -q module delete\n", + "!madsci -q module list\n", + "!madsci -q lab info\n", + "!madsci -q module add-command --command_name \"echo\" --command \"echo 'Hello, world!'\"\n", + "!madsci -q module run echo\n", + "!madsci -q module delete-command --command_name echo\n", + "!madsci -q module info" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# *Test CLI node commands\n", + "!madsci -q node create --name \"test_node\"\n", + "!madsci -q node create --name \"test_node_2\" --description \"A test node with a description\"\n", + "!madsci -q workcell info\n", + "!madsci -q node list\n", + "!madsci -q node info\n", + "!madsci -q node delete\n", + "!madsci -q node list\n", + "!madsci -q workcell info\n", + "!madsci -q node add-command --command_name 
\"echo\" --command \"echo 'Hello, world!'\"\n", + "!madsci -q node run echo\n", + "!madsci -q node delete-command echo\n", + "!madsci -q node info" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.2" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/tests/test_module.ipynb b/tests/test_module.ipynb index 7bc2a63..90ddf0b 100644 --- a/tests/test_module.ipynb +++ b/tests/test_module.ipynb @@ -1,79 +1,79 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from rich import print\n", - "\n", - "from madsci.client.node.rest_node_client import RestNodeClient\n", - "from madsci.common.types.action_types import ActionRequest\n", - "from madsci.common.types.node_types import Node" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "node = Node(node_url=\"http://localhost:2000\")\n", - "node_client = RestNodeClient(node)\n", - "\n", - "info = node_client.get_info()\n", - "print(info)\n", - "status = node_client.get_status()\n", - "print(status)\n", - "state = node_client.get_state()\n", - "print(state)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "response = node_client.send_action(\n", - " ActionRequest(action_name=\"run_command\", args={\"command\": \"status\"}),\n", - ")\n", - "print(response)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "action_history = 
node_client.get_action_history()\n", - "action_id = action_history[0]\n", - "action_response = node_client.get_action_result(action_id)\n", - "print(action_response)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": ".venv", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.2" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from rich import print\n", + "\n", + "from madsci.client.node.rest_node_client import RestNodeClient\n", + "from madsci.common.types.action_types import ActionRequest\n", + "from madsci.common.types.node_types import Node" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "node = Node(node_url=\"http://localhost:2000\")\n", + "node_client = RestNodeClient(node)\n", + "\n", + "info = node_client.get_info()\n", + "print(info)\n", + "status = node_client.get_status()\n", + "print(status)\n", + "state = node_client.get_state()\n", + "print(state)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "response = node_client.send_action(\n", + " ActionRequest(action_name=\"run_command\", args={\"command\": \"status\"}),\n", + ")\n", + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "action_history = node_client.get_action_history()\n", + "action_id = action_history[0]\n", + "action_response = node_client.get_action_result(action_id)\n", + "print(action_response)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + 
"name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.2" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/tests/test_modules/liquidhandler.module.yaml b/tests/test_modules/liquidhandler.module.yaml index f20097a..04f2f5f 100644 --- a/tests/test_modules/liquidhandler.module.yaml +++ b/tests/test_modules/liquidhandler.module.yaml @@ -1,38 +1,38 @@ -module_name: liquidhandler -module_type: device -module_description: null -capabilities: - get_info: false - get_state: false - get_status: false - send_action: false - get_action_result: false - get_action_history: false - action_files: false - send_admin_commands: false - set_config: false - get_resources: false - get_log: false - events: false - resources: false - admin_commands: [] -config: - host: - name: host - description: The host of the REST API. - default: 127.0.0.1 - required: true - reset_on_change: true - port: - name: port - description: The port of the REST API. - default: 2000 - required: true - reset_on_change: true - protocol: - name: protocol - description: The protocol of the REST API, either 'http' or 'https'. - default: http - required: true - reset_on_change: true -commands: {} +module_name: liquidhandler +module_type: device +module_description: null +capabilities: + get_info: false + get_state: false + get_status: false + send_action: false + get_action_result: false + get_action_history: false + action_files: false + send_admin_commands: false + set_config: false + get_resources: false + get_log: false + events: false + resources: false + admin_commands: [] +config: + host: + name: host + description: The host of the REST API. + default: 127.0.0.1 + required: true + reset_on_change: true + port: + name: port + description: The port of the REST API. 
+ default: 2000 + required: true + reset_on_change: true + protocol: + name: protocol + description: The protocol of the REST API, either 'http' or 'https'. + default: http + required: true + reset_on_change: true +commands: {} diff --git a/tests/test_modules/liquidhandler.py b/tests/test_modules/liquidhandler.py index d23c2d8..664adbd 100644 --- a/tests/test_modules/liquidhandler.py +++ b/tests/test_modules/liquidhandler.py @@ -1,53 +1,53 @@ -"""A fake liquid handler module for testing.""" - -from typing import Any - -from madsci.common.types.action_types import ActionResult, ActionSucceeded -from madsci.module.abstract_module import action -from madsci.module.rest_module import RestNode - - -class LiquidHandler: - """A fake liquid handler for testing.""" - - status_code: int = 0 - - def __init__(self) -> "LiquidHandler": - """Initialize the liquid handler.""" - - def run_command(self, command: str) -> None: - """Run a command on the liquid handler.""" - print(f"Running command: {command}") - - -class LiquidHandlerModule(RestNode): - """A fake liquid handler module for testing.""" - - liquid_handler: LiquidHandler = None - - def startup_handler(self) -> None: - """Called to (re)initialize the node. Should be used to open connections to devices or initialize any other resources.""" - self.liquid_handler = LiquidHandler() - - def shutdown_handler(self) -> None: - """Called to shutdown the node. 
Should be used to close connections to devices or release any other resources.""" - print("Shutting down") - del self.liquid_handler - - def state_handler(self) -> dict[str, Any]: - """Periodically called to get the current state of the node.""" - if self.liquid_handler is not None: - self.node_state = { - "liquid_handler_status_code": self.liquid_handler.status_code, - } - - @action - def run_command(self, command: str) -> ActionResult: - """Run a command on the liquid handler.""" - self.liquid_handler.run_command(command) - return ActionSucceeded() - - -if __name__ == "__main__": - liquid_handler_node = LiquidHandlerModule() - liquid_handler_node.start_node() +"""A fake liquid handler module for testing.""" + +from typing import Any + +from madsci.common.types.action_types import ActionResult, ActionSucceeded +from madsci.module.abstract_module import action +from madsci.module.rest_module import RestNode + + +class LiquidHandler: + """A fake liquid handler for testing.""" + + status_code: int = 0 + + def __init__(self) -> "LiquidHandler": + """Initialize the liquid handler.""" + + def run_command(self, command: str) -> None: + """Run a command on the liquid handler.""" + print(f"Running command: {command}") + + +class LiquidHandlerModule(RestNode): + """A fake liquid handler module for testing.""" + + liquid_handler: LiquidHandler = None + + def startup_handler(self) -> None: + """Called to (re)initialize the node. Should be used to open connections to devices or initialize any other resources.""" + self.liquid_handler = LiquidHandler() + + def shutdown_handler(self) -> None: + """Called to shutdown the node. 
Should be used to close connections to devices or release any other resources.""" + print("Shutting down") + del self.liquid_handler + + def state_handler(self) -> dict[str, Any]: + """Periodically called to get the current state of the node.""" + if self.liquid_handler is not None: + self.node_state = { + "liquid_handler_status_code": self.liquid_handler.status_code, + } + + @action + def run_command(self, command: str) -> ActionResult: + """Run a command on the liquid handler.""" + self.liquid_handler.run_command(command) + return ActionSucceeded() + + +if __name__ == "__main__": + liquid_handler_node = LiquidHandlerModule() + liquid_handler_node.start_node() diff --git a/tests/test_modules/nodes/default.node.info.yaml b/tests/test_modules/nodes/default.node.info.yaml index efa7019..1ad07d5 100644 --- a/tests/test_modules/nodes/default.node.info.yaml +++ b/tests/test_modules/nodes/default.node.info.yaml @@ -1,59 +1,59 @@ -module_name: liquidhandler -module_type: device -module_description: null -capabilities: - get_info: false - get_state: false - get_status: false - send_action: false - get_action_result: false - get_action_history: false - action_files: false - send_admin_commands: false - set_config: false - get_resources: false - get_log: false - events: false - resources: false - admin_commands: - - reset - - shutdown -config: - host: - name: host - description: The host of the REST API. - default: 127.0.0.1 - required: true - reset_on_change: true - port: - name: port - description: The port of the REST API. - default: 2000 - required: true - reset_on_change: true - protocol: - name: protocol - description: The protocol of the REST API, either 'http' or 'https'. 
- default: http - required: true - reset_on_change: true -commands: {} -node_name: default -node_id: 01JD7WCXX8Y1CMER8P3XHA6CTW -node_url: null -node_description: Default liquidhandler -module_definition: ../liquidhandler.module.yaml -actions: - run_command: - name: run_command - description: Run a command on the liquid handler. - args: - command: - name: command - description: '' - type: str - required: true - default: null - files: {} - results: {} - blocking: false +module_name: liquidhandler +module_type: device +module_description: null +capabilities: + get_info: false + get_state: false + get_status: false + send_action: false + get_action_result: false + get_action_history: false + action_files: false + send_admin_commands: false + set_config: false + get_resources: false + get_log: false + events: false + resources: false + admin_commands: + - reset + - shutdown +config: + host: + name: host + description: The host of the REST API. + default: 127.0.0.1 + required: true + reset_on_change: true + port: + name: port + description: The port of the REST API. + default: 2000 + required: true + reset_on_change: true + protocol: + name: protocol + description: The protocol of the REST API, either 'http' or 'https'. + default: http + required: true + reset_on_change: true +commands: {} +node_name: default +node_id: 01JD7WCXX8Y1CMER8P3XHA6CTW +node_url: null +node_description: Default liquidhandler +module_definition: ../liquidhandler.module.yaml +actions: + run_command: + name: run_command + description: Run a command on the liquid handler. 
+ args: + command: + name: command + description: '' + type: str + required: true + default: null + files: {} + results: {} + blocking: false diff --git a/tests/test_modules/nodes/default.node.yaml b/tests/test_modules/nodes/default.node.yaml index 686bb63..f1b1863 100644 --- a/tests/test_modules/nodes/default.node.yaml +++ b/tests/test_modules/nodes/default.node.yaml @@ -1,25 +1,25 @@ -node_name: default -node_id: 01JD7WCXX8Y1CMER8P3XHA6CTW -node_url: null -node_description: Default liquidhandler -module_definition: ../liquidhandler.module.yaml -config: - host: - name: host - description: The host of the REST API. - default: 127.0.0.1 - required: true - reset_on_change: true - port: - name: port - description: The port of the REST API. - default: 2000 - required: true - reset_on_change: true - protocol: - name: protocol - description: The protocol of the REST API, either 'http' or 'https'. - default: http - required: true - reset_on_change: true -commands: {} +node_name: default +node_id: 01JD7WCXX8Y1CMER8P3XHA6CTW +node_url: null +node_description: Default liquidhandler +module_definition: ../liquidhandler.module.yaml +config: + host: + name: host + description: The host of the REST API. + default: 127.0.0.1 + required: true + reset_on_change: true + port: + name: port + description: The port of the REST API. + default: 2000 + required: true + reset_on_change: true + protocol: + name: protocol + description: The protocol of the REST API, either 'http' or 'https'. 
+ default: http + required: true + reset_on_change: true +commands: {} From 2128bfeaa7fae30ca7801f1db96b532a957e12a8 Mon Sep 17 00:00:00 2001 From: root Date: Wed, 11 Dec 2024 00:20:05 +0000 Subject: [PATCH 2/5] first pass, uploading workcells, files, workflows --- .../madsci/client/node/__init__.py | 2 + .../client/node/abstract_node_client.py | 11 +- .../madsci/client/node/rest_node_client.py | 23 +-- .../madsci/client/workflow/__init__.py | 9 ++ .../madsci/client/workflow/workflow_client.py | 148 ++++++++++++++++++ .../madsci/common/types/workcell_types.py | 14 +- .../madsci/common/types/workflow_types.py | 36 +++-- .../madsci/tests/workflow_tests.py | 21 +-- .../madsci/workcell_manager/redis_handler.py | 22 +-- .../workcell_manager/schedulers/__init__.py | 1 + .../schedulers/default_scheduler | 34 ++++ .../{ => schedulers}/scheduler.py | 0 .../workcell_manager/workcell_engine.py | 36 +++++ .../workcell_manager/workcell_server.py | 6 +- .../madsci/workcell_manager/workcell_utils.py | 48 ++++++ .../madsci/workcell_manager/workflow_utils.py | 80 +++++++--- tests/example/protocols/protocol.txt | 0 .../workcells/test_workcell.workcell.yaml | 6 +- .../workflows/test_workflow.workflow.yaml | 9 +- tests/test_modules/liquidhandler.module.yaml | 76 ++++----- .../test_modules/nodes/default.node.info.yaml | 118 +++++++------- tests/test_modules/nodes/default.node.yaml | 50 +++--- 22 files changed, 536 insertions(+), 214 deletions(-) create mode 100644 madsci/madsci_client/madsci/client/workflow/__init__.py create mode 100644 madsci/madsci_client/madsci/client/workflow/workflow_client.py create mode 100644 madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/__init__.py create mode 100644 madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler rename madsci/madsci_workcell_manager/madsci/workcell_manager/{ => schedulers}/scheduler.py (100%) create mode 100644 madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_engine.py 
create mode 100644 madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_utils.py create mode 100644 tests/example/protocols/protocol.txt diff --git a/madsci/madsci_client/madsci/client/node/__init__.py b/madsci/madsci_client/madsci/client/node/__init__.py index c6fb537..4255409 100644 --- a/madsci/madsci_client/madsci/client/node/__init__.py +++ b/madsci/madsci_client/madsci/client/node/__init__.py @@ -7,6 +7,8 @@ "rest_node_client": RestNodeClient, } + + __all__ = [ "NODE_CLIENT_MAP", "AbstractNodeClient", diff --git a/madsci/madsci_client/madsci/client/node/abstract_node_client.py b/madsci/madsci_client/madsci/client/node/abstract_node_client.py index fb3f566..a1bf685 100644 --- a/madsci/madsci_client/madsci/client/node/abstract_node_client.py +++ b/madsci/madsci_client/madsci/client/node/abstract_node_client.py @@ -1,6 +1,7 @@ """Base node client implementation.""" from typing import Any, ClassVar +from pydantic import AnyUrl from madsci.common.types.action_types import ( ActionRequest, @@ -20,7 +21,6 @@ ) from madsci.common.types.resource_types import ResourceDefinition - class AbstractNodeClient: """Base Node Client, protocol agnostic, all node clients should inherit from or be based on this.""" @@ -30,9 +30,9 @@ class AbstractNodeClient: supported_capabilities: ClassVar[NodeClientCapabilities] = NodeClientCapabilities() """The capabilities supported by this node client.""" - def __init__(self, node: Node) -> "AbstractNodeClient": + def __init__(self, url: AnyUrl) -> "AbstractNodeClient": """Initialize the client.""" - self.node = node + self.url = url def send_action(self, action_request: ActionRequest) -> ActionResult: """Perform an action on the node.""" @@ -77,3 +77,8 @@ def get_resources(self) -> dict[str, ResourceDefinition]: def get_log(self) -> list[Event]: """Get the log of the node.""" raise NotImplementedError("get_log is not implemented by this client") + @classmethod + def validate_url(cls, url: AnyUrl) -> bool: + """check if a url 
matches this node type""" + protocol = url.scheme + return protocol in cls.url_protocols diff --git a/madsci/madsci_client/madsci/client/node/rest_node_client.py b/madsci/madsci_client/madsci/client/node/rest_node_client.py index c70bd69..5ee3227 100644 --- a/madsci/madsci_client/madsci/client/node/rest_node_client.py +++ b/madsci/madsci_client/madsci/client/node/rest_node_client.py @@ -3,6 +3,7 @@ import json from pathlib import Path from typing import Any, ClassVar +from pydantic import AnyUrl import requests @@ -44,9 +45,9 @@ class RestNodeClient(AbstractNodeClient): get_resources=False, ) - def __init__(self, node: Node) -> "RestNodeClient": + def __init__(self, url: AnyUrl) -> "RestNodeClient": """Initialize the client.""" - super().__init__(node) + super().__init__(url) def send_action(self, action_request: ActionRequest) -> ActionResult: """Perform an action on the node.""" @@ -59,7 +60,7 @@ def send_action(self, action_request: ActionRequest) -> ActionResult: print(files) rest_response = requests.post( - f"{self.node.node_url}/action", + f"{self.url}/action", params={ "action_name": action_request.action_name, "args": json.dumps(action_request.args), @@ -78,7 +79,7 @@ def send_action(self, action_request: ActionRequest) -> ActionResult: def get_action_history(self) -> list[str]: """Get a list of the action IDs for actions that the node has recently performed.""" - response = requests.get(f"{self.node.node_url}/action", timeout=10) + response = requests.get(f"{self.url}/action", timeout=10) if not response.ok: response.raise_for_status() return response.json() @@ -86,7 +87,7 @@ def get_action_history(self) -> list[str]: def get_action_result(self, action_id: str) -> ActionResult: """Get the result of an action on the node.""" response = requests.get( - f"{self.node.node_url}/action/{action_id}", + f"{self.url}/action/{action_id}", timeout=10, ) if not response.ok: @@ -95,21 +96,21 @@ def get_action_result(self, action_id: str) -> ActionResult: def 
get_status(self) -> NodeStatus: """Get the status of the node.""" - response = requests.get(f"{self.node.node_url}/status", timeout=10) + response = requests.get(f"{self.url}/status", timeout=10) if not response.ok: response.raise_for_status() return NodeStatus.model_validate(response.json()) def get_state(self) -> dict[str, Any]: """Get the state of the node.""" - response = requests.get(f"{self.node.node_url}/state", timeout=10) + response = requests.get(f"{self.url}/state", timeout=10) if not response.ok: response.raise_for_status() return response.json() def get_info(self) -> NodeInfo: """Get information about the node and module.""" - response = requests.get(f"{self.node.node_url}/info", timeout=10) + response = requests.get(f"{self.url}/info", timeout=10) if not response.ok: response.raise_for_status() return NodeInfo.model_validate(response.json()) @@ -117,7 +118,7 @@ def get_info(self) -> NodeInfo: def set_config(self, config_dict: dict[str, Any]) -> NodeSetConfigResponse: """Set configuration values of the node.""" response = requests.post( - f"{self.node.node_url}/config", + f"{self.url}/config", json=config_dict, timeout=60, ) @@ -128,7 +129,7 @@ def set_config(self, config_dict: dict[str, Any]) -> NodeSetConfigResponse: def send_admin_command(self, admin_command: AdminCommands) -> bool: """Perform an administrative command on the node.""" response = requests.post( - f"{self.node.node_url}/admin", + f"{self.url}/admin", json={"admin_command": admin_command}, timeout=10, ) @@ -145,7 +146,7 @@ def get_resources(self) -> dict[str, ResourceDefinition]: def get_log(self) -> list[Event]: """Get the log from the node""" - response = requests.get(f"{self.node.node_url}/log", timeout=10) + response = requests.get(f"{self.url}/log", timeout=10) if not response.ok: response.raise_for_status() return response.json() diff --git a/madsci/madsci_client/madsci/client/workflow/__init__.py b/madsci/madsci_client/madsci/client/workflow/__init__.py new file mode 100644 
index 0000000..11eee32 --- /dev/null +++ b/madsci/madsci_client/madsci/client/workflow/__init__.py @@ -0,0 +1,9 @@ +"""MADSci workflow client implementations.""" + +from madsci.client.workflow.workflow_client import WorkflowClient + + + +__all__ = [ + "WorkflowClient", +] diff --git a/madsci/madsci_client/madsci/client/workflow/workflow_client.py b/madsci/madsci_client/madsci/client/workflow/workflow_client.py new file mode 100644 index 0000000..c7ba4f7 --- /dev/null +++ b/madsci/madsci_client/madsci/client/workflow/workflow_client.py @@ -0,0 +1,148 @@ +from madsci.common.types.workflow_types import Workflow, WorkflowDefinition +from typing import Any + +from pathlib import Path +import re +import copy +import requests +import json + +class WorkflowClient: + """a client for running workflows""" + def __init__(self, workcell_manager_url: str, working_directory: str="~/.MADsci/temp") -> "WorkflowClient": + """initialize the client""" + self.url = workcell_manager_url + self.working_directory = Path(working_directory) + + def send_workflow(self, workflow: str, parameters: dict, validate_only: bool = False) -> Workflow: + """send a workflow to the workcell manager""" + workflow = WorkflowDefinition.from_yaml(workflow) + print(workflow) + WorkflowDefinition.model_validate(workflow) + insert_parameter_values(workflow=workflow, parameters=parameters) + files = self._extract_files_from_workflow(workflow) + url = self.url + "/start_workflow" + response = requests.post( + url, + data={ + "workflow": workflow.model_dump_json(), + "parameters": json.dumps(parameters), + "validate_only": validate_only + }, + files={ + ("files", (str(Path(path).name), Path.open(path, "rb"))) + for _, path in files.items() + }, + ) + print(response) + def _extract_files_from_workflow( + self, workflow: WorkflowDefinition + ) -> dict[str, Any]: + """ + Returns a dictionary of files from a workflow + """ + files = {} + for step in workflow.flowdef: + if step.files: + for file, path in 
step.files.items(): + # * Try to get the file from the payload, if applicable + unique_filename = f"{step.step_id}_{file}" + files[unique_filename] = path + if not Path(files[unique_filename]).is_absolute(): + files[unique_filename] = ( + self.working_directory / files[unique_filename] + ) + step.files[file] = Path(files[unique_filename]).name + return files + + +def insert_parameter_values(workflow: WorkflowDefinition, parameters: dict[str, Any]) -> Workflow: + """Replace the parameter strings in the workflow with the provided values""" + for param in workflow.parameters: + if param.name not in parameters: + if param.default: + parameters[param.name] = param.default + else: + raise ValueError( + "Workflow parameter: " + + param.name + + " not provided, and no default value is defined." + ) + steps = [] + for step in workflow.flowdef: + for key, val in iter(step): + if type(val) is str: + setattr(step, key, value_substitution(val, parameters)) + + step.args = walk_and_replace(step.args, parameters) + steps.append(step) + workflow.flowdef = steps + + +def walk_and_replace(args: dict[str, Any], input_parameters: dict[str, Any]) -> dict[str, Any]: + """Recursively walk the arguments and replace all parameters""" + new_args = copy.deepcopy(args) + for key, val in args.items(): + if type(val) is str: + new_args[key] = value_substitution(val, input_parameters) + elif type(args[key]) is dict: + new_args[key] = walk_and_replace(val, input_parameters) + if type(key) is str: + new_key = value_substitution(key, input_parameters) + new_args[new_key] = new_args[key] + if key is not new_key: + new_args.pop(key, None) + return new_args + + +def value_substitution(input_string: str, input_parameters: dict[str, Any]) -> str: + """Perform $-string substitution on input string, returns string with substituted values""" + # * Check if the string is a simple parameter reference + if type(input_string) is str and re.match(r"^\$[A-z0-9_\-]*$", input_string): + if input_string.strip("$") 
in input_parameters: + input_string = input_parameters[input_string.strip("$")] + else: + raise ValueError( + "Unknown parameter:" + + input_string + + ", please define it in the parameters section of the Workflow Definition." + ) + else: + # * Replace all parameter references contained in the string + working_string = input_string + for match in re.findall(r"((? Any: - -class Workflow(WorkflowDefinition): - """Container for a workflow run""" - - label: Optional[str] = None - """Label for the workflow run""" - run_id: str = Field(default_factory=new_ulid_str) - """ID of the workflow run""" - payload: dict[str, Any] = {} - """input information for a given workflow run""" +class SchedulerMetadata(BaseModel): status: WorkflowStatus = Field(default=WorkflowStatus.NEW) """current status of the workflow""" - steps: list[Step] = [] - """WEI Processed Steps of the flow""" - experiment_id: str - """ID of the experiment this workflow is a part of""" step_index: int = 0 """Index of the current step""" simulate: bool = False @@ -114,6 +101,23 @@ class Workflow(WorkflowDefinition): duration: Optional[timedelta] = None """Duration of the workflow's run""" + + +class Workflow(WorkflowDefinition): + """Container for a workflow run""" + scheduler_metadata: SchedulerMetadata = Field(default_factory=SchedulerMetadata) + """scheduler information for the workflow run""" + label: Optional[str] = None + """Label for the workflow run""" + run_id: str = Field(default_factory=new_ulid_str) + """ID of the workflow run""" + steps: list[Step] = [] + """WEI Processed Steps of the flow""" + parameter_values: dict[str, Any] = Field(default_factory={}) + """parameter values used inthis workflow""" + experiment_id: Optional[str] = None + """ID of the experiment this workflow is a part of""" + def get_step_by_name(self, name: str) -> Step: """Return the step object by its name""" for step in self.steps: diff --git a/madsci/madsci_workcell_manager/madsci/tests/workflow_tests.py 
b/madsci/madsci_workcell_manager/madsci/tests/workflow_tests.py index 64e9fe7..c684b2d 100644 --- a/madsci/madsci_workcell_manager/madsci/tests/workflow_tests.py +++ b/madsci/madsci_workcell_manager/madsci/tests/workflow_tests.py @@ -1,19 +1,8 @@ from madsci.common.types.workflow_types import WorkflowDefinition +from madsci.madsci_client.madsci.client.workflow.workflow_client import WorkflowClient import requests from pathlib import Path -def test_send_workflow(workflow: str, parameters: dict): - workflow = WorkflowDefinition.from_yaml(workflow) - WorkflowDefinition.model_validate(workflow) - url = f"http://localhost:8013/start_workflow" - response = requests.post( - url, - data={ - "workflow": workflow.model_dump_json(), - "parameters":{} - }, - files=[] - ) - print("hi") - print(response) - -test_send_workflow(Path("../../../../tests/example/workflows/test_workflow.workflow.yaml").resolve(), {}) \ No newline at end of file + + +client = WorkflowClient("http://localhost:8013") +client.send_workflow(Path("../../../../tests/example/workflows/test_workflow.workflow.yaml").resolve(), {}) \ No newline at end of file diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py index fa4c35f..ec653fb 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py @@ -59,7 +59,11 @@ def _workcell(self) -> RedisDict: return RedisDict( key=f"{self._workcell_prefix}:workcell", redis=self._redis_client ) - + @property + def _nodes(self) -> RedisDict: + return RedisDict( + key=f"{self._workcell_prefix}:nodes", redis=self._redis_client + ) @property def _workflows(self) -> RedisDict: return RedisDict( @@ -86,8 +90,7 @@ def get_state(self) -> Dict[str, Dict[Any, Any]]: return { "status": self.wc_status, "error": self.error, - "locations": self._locations.to_dict(), - "modules": 
self._modules.to_dict(), + "nodes": self._nodes.to_dict(), "workflows": self._workflow_runs.to_dict(), "workcell": self._workcell.to_dict(), "paused": self.paused, @@ -109,17 +112,14 @@ def error(self, value: str) -> None: return self._redis_client.set(f"{self._workcell_prefix}:error", value) def clear_state( - self, reset_locations: bool = True, clear_workflow_runs: bool = False + self, clear_workflows: bool = False ) -> None: """ Clears the state of the workcell, optionally leaving the locations state intact. """ - self._modules.clear() - if reset_locations: - self._locations.clear() - if clear_workflow_runs: - self._workflow_runs.clear() - self._workcell.clear() + self._nodes.clear() + if clear_workflows: + self._workflows.clear() self.state_change_marker = "0" self.paused = False self.locked = False @@ -183,7 +183,7 @@ def get_all_workflows(self) -> dict[str, Workflow]: Returns all workflow runs """ valid_workflows = {} - for run_id, workflow in self._workflow.to_dict().items(): + for run_id, workflow in self._workflows.to_dict().items(): try: valid_workflows[str(run_id)] = Workflow.model_validate( workflow diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/__init__.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/__init__.py new file mode 100644 index 0000000..52e9b73 --- /dev/null +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/__init__.py @@ -0,0 +1 @@ +"""MADSci Workcell Manager.""" diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler new file mode 100644 index 0000000..078c5d9 --- /dev/null +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler @@ -0,0 +1,34 @@ +from scheduler import Scheduler +class DefaultScheduler(Scheduler): + + def run_iteration(self): + for run_id, wf in 
self.state_handler.get_all_workflow_runs().items(): + if wf.status == WorkflowStatus.NEW: + wf.status = WorkflowStatus.QUEUED + print( + f"Processed new workflow: {wf.name} with run_id: {run_id}" + ) + #send_event(WorkflowQueuedEvent.from_wf_run(wf_run=wf_run)) + self.state_handler.set_workflow_run(wf) + elif wf.status in [ + WorkflowStatus.QUEUED, + WorkflowStatus.IN_PROGRESS, + ]: + step = wf.steps[wf.step_index] + if check_step(wf.experiment_id, run_id, step): + module = find_step_module( + self.state_handler.get_workcell(), step.module + ) + + #if wf_run.status == WorkflowStatus.QUEUED: + #send_event(WorkflowStartEvent.from_wf_run(wf_run=wf_run)) + wf.status = WorkflowStatus.RUNNING + print( + f"Starting step {wf.name}.{step.name} for run: {run_id}" + ) + if wf.step_index == 0: + wf.start_time = datetime.now() + self.state_handler.set_workflow_run(wf) + run_step(wf_run=wf, module=module) + + diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/scheduler.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/scheduler.py similarity index 100% rename from madsci/madsci_workcell_manager/madsci/workcell_manager/scheduler.py rename to madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/scheduler.py diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_engine.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_engine.py new file mode 100644 index 0000000..2f98d55 --- /dev/null +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_engine.py @@ -0,0 +1,36 @@ +""" +Engine Class and associated helpers and data +""" + +import time +import traceback + +import requests +import importlib +#from schedulers.default_scheduler import DefaultScheduler +from workcell_utils import initialize_state +from workcell_manager_types import WorkcellManagerDefinition +from redis_handler import WorkcellRedisHandler +from workflow_utils import cancel_active_workflows + +class Engine: + """ + 
Handles scheduling workflows and executing steps on the workcell. + Pops incoming workflows off a redis-based queue and executes them. + """ + + def __init__(self, workcell_manager_definition: WorkcellManagerDefinition, state_manager: WorkcellRedisHandler) -> None: + """Initialize the scheduler.""" + state_manager.clear_state( + clear_workflows=workcell_manager_definition.plugin_config.clear_workflows + ) + cancel_active_workflows(state_manager) + #self.scheduler = DefaultScheduler() + with state_manager.wc_state_lock(): + initialize_state(state_manager) + #time.sleep(workcell_manager_definition.plugin_config.cold_start_delay) + + print("Engine initialized, waiting for workflows...") + #send_event(WorkcellStartEvent(workcell=state_manager.get_workcell())) + + \ No newline at end of file diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py index 69d992a..5cfdf38 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py @@ -9,6 +9,7 @@ ) from madsci.workcell_manager.workflow_utils import create_workflow, save_workflow_files +from workcell_engine import Engine from typing import Annotated, Optional from madsci.common.types.workcell_types import WorkcellDefinition @@ -28,6 +29,7 @@ async def lifespan(app: FastAPI) -> None: app.state.state_handler=WorkcellRedisHandler(workcell_manager_definition) app.state.state_handler.set_workcell(workcell) + Engine(workcell_manager_definition, app.state.state_handler) yield app = FastAPI(lifespan=lifespan) @@ -42,7 +44,7 @@ def get_workcell() -> WorkcellDefinition: return app.state.state_handler.get_workcell() @app.post("/start_workflow") -async def start_run( +async def start_workflow( workflow: Annotated[str, Form()], experiment_id: Annotated[Optional[str], Form()] = None, parameters: Annotated[Optional[str], Form()] 
= None, @@ -96,7 +98,7 @@ async def start_run( ) if not validate_only: - wf = save_workflow_files(wf=wf, files=files) + wf = save_workflow_files(working_directory=workcell_manager_definition.plugin_config.workcell_directory, workflow=wf, files=files) with app.state.state_handler.wc_state_lock(): app.state.state_handler.set_workflow(wf) return wf diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_utils.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_utils.py new file mode 100644 index 0000000..512c915 --- /dev/null +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_utils.py @@ -0,0 +1,48 @@ +from redis_handler import WorkcellRedisHandler +from pydantic import AnyUrl +from madsci.common.types.workcell_types import WorkcellDefinition +from madsci.common.types.node_types import NodeStatus, Node, NodeDefinition +from madsci.client.node import AbstractNodeClient, NODE_CLIENT_MAP + +def initialize_state(state_manager: WorkcellRedisHandler, workcell=None) -> None: + """ + Initializes the state of the workcell from the workcell definition. 
+ """ + + if not workcell: + workcell = state_manager.get_workcell() + initialize_workcell_nodes(workcell) + initialize_workcell_resources(workcell) + +def initialize_workcell_nodes(workcell): + for value in workcell.nodes.values(): + if type(value) is NodeDefinition: + url = value.url + elif type(value) is AnyUrl: + url = value + elif type(value) is str: + url = AnyUrl(value) + update_node_info(url, workcell) + update_node_status(url, workcell) + + +def initialize_workcell_resources(workcell): + pass + +def update_node_info(url: AnyUrl, workcell: WorkcellDefinition): + client = find_node_client(url) + print(client.get_info()) + +def update_node_status(url: AnyUrl, workcell: WorkcellDefinition): + client = find_node_client(url) + print(client.get_status()) + +def find_node_client(url: str) -> AbstractNodeClient: + """finds the right client for the node url provided""" + for client in NODE_CLIENT_MAP.values(): + if client.validate_url(url): + return client(url) + for client in AbstractNodeClient.__subclasses__(): + if client.validate_url(url): + return client(url) + return None diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py index bb4c4ad..ea132d3 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py @@ -1,38 +1,24 @@ """Utility function for the workcell manager.""" from madsci.common.types.workcell_types import WorkcellDefinition -from madsci.common.types.workflow_types import Workflow, WorkflowDefinition +from madsci.common.types.workflow_types import Workflow, WorkflowDefinition, WorkflowStatus from madsci.common.types.step_types import Step from madsci.common.types.node_types import Node +from redis_handler import WorkcellRedisHandler from typing import Optional, Any from fastapi import UploadFile +import re +import copy +from pathlib import 
Path -def find_step_node(workcell: WorkcellDefinition, step_module: str) -> Optional[Node]: - """finds the full module information based on just its name - - Parameters - ---------- - step_module : str - the name of the module - Returns - ------- - module: Module - The class with full information about the given module - """ - for node in workcell.nodes: - node_name = node.name - if node_name == step_module: - return node - - raise ValueError(f"Module {step_module} not in Workcell {workcell.name}") def validate_node_names(workflow: Workflow, workcell: WorkcellDefinition) -> None: """ Validates that the nodes in the workflow.flowdef are in the workcell.modules """ - [ - find_step_node(workcell, node_name) - for node_name in [step.node for step in workflow.flowdef] - ] + for node_name in [step.node for step in workflow.flowdef]: + if not node_name in workcell.nodes: + raise ValueError(f"Node {node_name} not in Workcell {workcell.name}") + def replace_positions(workcell: WorkcellDefinition, step: Step): """Allow the user to put location names instead of """ pass @@ -77,9 +63,9 @@ def create_workflow( wf_dict.update( { "label": workflow_def.name, - "parameters": parameters, "experiment_id": experiment_id, "simulate": simulate, + "parameter_values": parameters } ) wf = Workflow(**wf_dict) @@ -96,8 +82,52 @@ def create_workflow( wf.steps = steps return wf -def save_workflow_files(wf: Workflow, files: list[UploadFile]) -> Workflow: + +def save_workflow_files(working_directory: str, workflow: Workflow, files: list[UploadFile]) -> Workflow: """Saves the files to the workflow run directory, and updates the step files to point to the new location""" + get_workflow_inputs_directory( + workflow_run_id=workflow.run_id, + working_directory=working_directory + ).mkdir(parents=True, exist_ok=True) + if files: + for file in files: + file_path = ( + get_workflow_inputs_directory( + working_directory=working_directory, + workflow_run_id=workflow.run_id, + ) + / file.filename + ) + 
with Path.open(file_path, "wb") as f: + f.write(file.file.read()) + for step in workflow.steps: + for step_file_key, step_file_path in step.files.items(): + if step_file_path == file.filename: + step.files[step_file_key] = str(file_path) + print(f"{step_file_key}: {file_path} ({step_file_path})") + return workflow + +def get_workflow_inputs_directory(workflow_run_id: str = None, working_directory: str = None) -> Path: + """returns a directory name for the workflows inputs""" + return Path(working_directory) / "Workflows" / workflow_run_id / "Inputs" + + +def cancel_workflow(wf: Workflow, state_manager: WorkcellRedisHandler) -> None: + """Cancels the workflow run""" + wf.scheduler_metadata.status = WorkflowStatus.CANCELLED + with state_manager.wc_state_lock(): + state_manager.set_workflow_run(wf) return wf + + +def cancel_active_workflows(state_manager: WorkcellRedisHandler) -> None: + """Cancels all currently running workflow runs""" + for wf in state_manager.get_all_workflows().values(): + if wf.scheduler_metadata.status in [ + WorkflowStatus.RUNNING, + WorkflowStatus.QUEUED, + WorkflowStatus.IN_PROGRESS, + ]: + cancel_workflow(wf) diff --git a/tests/example/protocols/protocol.txt b/tests/example/protocols/protocol.txt new file mode 100644 index 0000000..e69de29 diff --git a/tests/example/workcells/test_workcell.workcell.yaml b/tests/example/workcells/test_workcell.workcell.yaml index 975fd61..12041e3 100644 --- a/tests/example/workcells/test_workcell.workcell.yaml +++ b/tests/example/workcells/test_workcell.workcell.yaml @@ -5,5 +5,9 @@ config: scheduler_update_interval: 0.1 node_update_interval: 1.0 auto_start: true + workcell_directory: "/.MADSci/Workcell" -nodes: {} +nodes: { + "liquid_handler": "http://localhost:2000" + +} diff --git a/tests/example/workflows/test_workflow.workflow.yaml b/tests/example/workflows/test_workflow.workflow.yaml index b473a2d..9e7fe23 100644 --- a/tests/example/workflows/test_workflow.workflow.yaml +++ 
b/tests/example/workflows/test_workflow.workflow.yaml @@ -1,18 +1,17 @@ name: Test_Workflow -metadata: +workflow_metadata: author: Tobias Ginsburg, Kyle Hippe, Ryan D. Lewis - info: Example workflow for WEI + description: Example workflow for WEI version: 0.3 parameters: - name: delay default: 1.5 - - name: pos - - name: aim flowdef: - - name: Get plate to $pos + - name: Get plate to thingy node: transfer action: transfer args: target: thingy comment: Get a new plate + files: {"protocol": "/workspaces/MADSci/tests/example/protocols/protocol.txt"} diff --git a/tests/test_modules/liquidhandler.module.yaml b/tests/test_modules/liquidhandler.module.yaml index 04f2f5f..f20097a 100644 --- a/tests/test_modules/liquidhandler.module.yaml +++ b/tests/test_modules/liquidhandler.module.yaml @@ -1,38 +1,38 @@ -module_name: liquidhandler -module_type: device -module_description: null -capabilities: - get_info: false - get_state: false - get_status: false - send_action: false - get_action_result: false - get_action_history: false - action_files: false - send_admin_commands: false - set_config: false - get_resources: false - get_log: false - events: false - resources: false - admin_commands: [] -config: - host: - name: host - description: The host of the REST API. - default: 127.0.0.1 - required: true - reset_on_change: true - port: - name: port - description: The port of the REST API. - default: 2000 - required: true - reset_on_change: true - protocol: - name: protocol - description: The protocol of the REST API, either 'http' or 'https'. 
- default: http - required: true - reset_on_change: true -commands: {} +module_name: liquidhandler +module_type: device +module_description: null +capabilities: + get_info: false + get_state: false + get_status: false + send_action: false + get_action_result: false + get_action_history: false + action_files: false + send_admin_commands: false + set_config: false + get_resources: false + get_log: false + events: false + resources: false + admin_commands: [] +config: + host: + name: host + description: The host of the REST API. + default: 127.0.0.1 + required: true + reset_on_change: true + port: + name: port + description: The port of the REST API. + default: 2000 + required: true + reset_on_change: true + protocol: + name: protocol + description: The protocol of the REST API, either 'http' or 'https'. + default: http + required: true + reset_on_change: true +commands: {} diff --git a/tests/test_modules/nodes/default.node.info.yaml b/tests/test_modules/nodes/default.node.info.yaml index 1ad07d5..efa7019 100644 --- a/tests/test_modules/nodes/default.node.info.yaml +++ b/tests/test_modules/nodes/default.node.info.yaml @@ -1,59 +1,59 @@ -module_name: liquidhandler -module_type: device -module_description: null -capabilities: - get_info: false - get_state: false - get_status: false - send_action: false - get_action_result: false - get_action_history: false - action_files: false - send_admin_commands: false - set_config: false - get_resources: false - get_log: false - events: false - resources: false - admin_commands: - - reset - - shutdown -config: - host: - name: host - description: The host of the REST API. - default: 127.0.0.1 - required: true - reset_on_change: true - port: - name: port - description: The port of the REST API. - default: 2000 - required: true - reset_on_change: true - protocol: - name: protocol - description: The protocol of the REST API, either 'http' or 'https'. 
- default: http - required: true - reset_on_change: true -commands: {} -node_name: default -node_id: 01JD7WCXX8Y1CMER8P3XHA6CTW -node_url: null -node_description: Default liquidhandler -module_definition: ../liquidhandler.module.yaml -actions: - run_command: - name: run_command - description: Run a command on the liquid handler. - args: - command: - name: command - description: '' - type: str - required: true - default: null - files: {} - results: {} - blocking: false +module_name: liquidhandler +module_type: device +module_description: null +capabilities: + get_info: false + get_state: false + get_status: false + send_action: false + get_action_result: false + get_action_history: false + action_files: false + send_admin_commands: false + set_config: false + get_resources: false + get_log: false + events: false + resources: false + admin_commands: + - reset + - shutdown +config: + host: + name: host + description: The host of the REST API. + default: 127.0.0.1 + required: true + reset_on_change: true + port: + name: port + description: The port of the REST API. + default: 2000 + required: true + reset_on_change: true + protocol: + name: protocol + description: The protocol of the REST API, either 'http' or 'https'. + default: http + required: true + reset_on_change: true +commands: {} +node_name: default +node_id: 01JD7WCXX8Y1CMER8P3XHA6CTW +node_url: null +node_description: Default liquidhandler +module_definition: ../liquidhandler.module.yaml +actions: + run_command: + name: run_command + description: Run a command on the liquid handler. 
+ args: + command: + name: command + description: '' + type: str + required: true + default: null + files: {} + results: {} + blocking: false diff --git a/tests/test_modules/nodes/default.node.yaml b/tests/test_modules/nodes/default.node.yaml index f1b1863..686bb63 100644 --- a/tests/test_modules/nodes/default.node.yaml +++ b/tests/test_modules/nodes/default.node.yaml @@ -1,25 +1,25 @@ -node_name: default -node_id: 01JD7WCXX8Y1CMER8P3XHA6CTW -node_url: null -node_description: Default liquidhandler -module_definition: ../liquidhandler.module.yaml -config: - host: - name: host - description: The host of the REST API. - default: 127.0.0.1 - required: true - reset_on_change: true - port: - name: port - description: The port of the REST API. - default: 2000 - required: true - reset_on_change: true - protocol: - name: protocol - description: The protocol of the REST API, either 'http' or 'https'. - default: http - required: true - reset_on_change: true -commands: {} +node_name: default +node_id: 01JD7WCXX8Y1CMER8P3XHA6CTW +node_url: null +node_description: Default liquidhandler +module_definition: ../liquidhandler.module.yaml +config: + host: + name: host + description: The host of the REST API. + default: 127.0.0.1 + required: true + reset_on_change: true + port: + name: port + description: The port of the REST API. + default: 2000 + required: true + reset_on_change: true + protocol: + name: protocol + description: The protocol of the REST API, either 'http' or 'https'. 
+ default: http + required: true + reset_on_change: true +commands: {} From 8f119a565260668b7ae85de23b54de537f1fee8e Mon Sep 17 00:00:00 2001 From: root Date: Fri, 13 Dec 2024 00:20:32 +0000 Subject: [PATCH 3/5] way too much probably --- .../madsci/client/node/rest_node_client.py | 6 +- .../madsci/client/workflow/workflow_client.py | 4 +- .../madsci/common/types/node_types.py | 9 +- .../madsci/common/types/workcell_types.py | 11 ++ .../madsci/common/types/workflow_types.py | 4 +- .../madsci/module/abstract_module.py | 8 +- .../madsci/module/rest_module.py | 3 +- .../madsci/workcell_manager/redis_handler.py | 78 +++++++++++-- .../workcell_manager/schedulers/__init__.py | 2 +- .../schedulers/default_scheduler | 34 ------ .../schedulers/default_scheduler.py | 108 ++++++++++++++++++ .../workcell_manager/schedulers/scheduler.py | 52 +-------- .../workcell_manager/workcell_engine.py | 50 +++++++- .../workcell_manager_types.py | 1 + .../workcell_manager/workcell_server.py | 39 ++++++- .../madsci/workcell_manager/workcell_utils.py | 61 +++++++--- .../madsci/workcell_manager/workflow_utils.py | 60 +++++++--- .../workcells/test_workcell.workcell.yaml | 5 +- .../workflows/test_workflow.workflow.yaml | 13 ++- 19 files changed, 399 insertions(+), 149 deletions(-) delete mode 100644 madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler create mode 100644 madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler.py diff --git a/madsci/madsci_client/madsci/client/node/rest_node_client.py b/madsci/madsci_client/madsci/client/node/rest_node_client.py index 5ee3227..75eb93f 100644 --- a/madsci/madsci_client/madsci/client/node/rest_node_client.py +++ b/madsci/madsci_client/madsci/client/node/rest_node_client.py @@ -57,7 +57,7 @@ def send_action(self, action_request: ActionRequest) -> ActionResult: ("files", (file, Path(path).open("rb"))) # noqa: SIM115 for file, path in action_request.files.items() ] - print(files) + 
rest_response = requests.post( f"{self.url}/action", @@ -72,7 +72,7 @@ def send_action(self, action_request: ActionRequest) -> ActionResult: finally: # * Ensure files are closed for file in files: - file[1].close() + file[1][1].close() if not rest_response.ok: rest_response.raise_for_status() return ActionResult.model_validate(rest_response.json()) @@ -129,7 +129,7 @@ def set_config(self, config_dict: dict[str, Any]) -> NodeSetConfigResponse: def send_admin_command(self, admin_command: AdminCommands) -> bool: """Perform an administrative command on the node.""" response = requests.post( - f"{self.url}/admin", + f"{self.url}/admin/{admin_command}", json={"admin_command": admin_command}, timeout=10, ) diff --git a/madsci/madsci_client/madsci/client/workflow/workflow_client.py b/madsci/madsci_client/madsci/client/workflow/workflow_client.py index c7ba4f7..9caaa67 100644 --- a/madsci/madsci_client/madsci/client/workflow/workflow_client.py +++ b/madsci/madsci_client/madsci/client/workflow/workflow_client.py @@ -17,7 +17,6 @@ def __init__(self, workcell_manager_url: str, working_directory: str="~/.MADsci/ def send_workflow(self, workflow: str, parameters: dict, validate_only: bool = False) -> Workflow: """send a workflow to the workcell manager""" workflow = WorkflowDefinition.from_yaml(workflow) - print(workflow) WorkflowDefinition.model_validate(workflow) insert_parameter_values(workflow=workflow, parameters=parameters) files = self._extract_files_from_workflow(workflow) @@ -30,11 +29,10 @@ def send_workflow(self, workflow: str, parameters: dict, validate_only: bool = F "validate_only": validate_only }, files={ - ("files", (str(Path(path).name), Path.open(path, "rb"))) + ("files", (str(Path(path).name), Path.open(Path(path), "rb"))) for _, path in files.items() }, ) - print(response) def _extract_files_from_workflow( self, workflow: WorkflowDefinition ) -> dict[str, Any]: diff --git a/madsci/madsci_common/madsci/common/types/node_types.py 
b/madsci/madsci_common/madsci/common/types/node_types.py index 6d253fc..603692b 100644 --- a/madsci/madsci_common/madsci/common/types/node_types.py +++ b/madsci/madsci_common/madsci/common/types/node_types.py @@ -120,18 +120,23 @@ class Node(BaseModel, arbitrary_types_allowed=True): node_url: AnyUrl = Field( title="Node URL", - description="The URL used to communicate with the module.", + description="The URL used to communicate with the node.", ) status: Optional["NodeStatus"] = Field( default=None, title="Module Status", - description="The status of the module. Set to None if the module does not support status reporting or the status is unknown (e.g. if it hasn't reported/responded to status requests).", + description="The status of the node. Set to None if the node does not support status reporting or the status is unknown (e.g. if it hasn't reported/responded to status requests).", ) info: Optional["NodeInfo"] = Field( default=None, title="Node Info", description="Information about the node, provided by the node itself.", ) + state: Optional[dict[str, Any]] = Field( + default=None, + title="Node State", + description="Detailed nodes specific state information" + ) class NodeInfo(NodeDefinition, NodeModuleDefinition): diff --git a/madsci/madsci_common/madsci/common/types/workcell_types.py b/madsci/madsci_common/madsci/common/types/workcell_types.py index fa3a56c..44764e4 100644 --- a/madsci/madsci_common/madsci/common/types/workcell_types.py +++ b/madsci/madsci_common/madsci/common/types/workcell_types.py @@ -99,3 +99,14 @@ class WorkcellConfig(BaseModel): title="Clear Workflows", description="Whether the workcell should clear old workflows on restart", ) + cold_start_delay: int = Field( + default=0, + title="Cold Start Delay", + description="How long the Workcell engine should sleep on startup", + ) + scheduler: str = Field( + default="schedulers.default_scheduler", + title="scheduler", + description="Scheduler module in the workcell manager scheduler folder 
with a Scheduler class that inherits from AbstractScheduler to use" + + ) \ No newline at end of file diff --git a/madsci/madsci_common/madsci/common/types/workflow_types.py b/madsci/madsci_common/madsci/common/types/workflow_types.py index 91f7a8b..ba092ce 100644 --- a/madsci/madsci_common/madsci/common/types/workflow_types.py +++ b/madsci/madsci_common/madsci/common/types/workflow_types.py @@ -94,6 +94,8 @@ class SchedulerMetadata(BaseModel): """Index of the current step""" simulate: bool = False """Whether or not this workflow is being simulated""" + submitted_time: Optional[datetime] = None + """Time workflow was submitted to the scheduler""" start_time: Optional[datetime] = None """Time the workflow started running""" end_time: Optional[datetime] = None @@ -109,7 +111,7 @@ class Workflow(WorkflowDefinition): """scheduler information for the workflow run""" label: Optional[str] = None """Label for the workflow run""" - run_id: str = Field(default_factory=new_ulid_str) + workflow_id: str = Field(default_factory=new_ulid_str) """ID of the workflow run""" steps: list[Step] = [] """WEI Processed Steps of the flow""" diff --git a/madsci/madsci_module/madsci/module/abstract_module.py b/madsci/madsci_module/madsci/module/abstract_module.py index ae5703c..d30abe1 100644 --- a/madsci/madsci_module/madsci/module/abstract_module.py +++ b/madsci/madsci_module/madsci/module/abstract_module.py @@ -472,10 +472,12 @@ def _parse_action_args( else: self.logger.log_info(f"Ignoring unexpected argument {arg_name}") for file in action_request.files: - if file.filename in parameters: - arg_dict[file.filename] = file + + if file in parameters: + arg_dict[file] = action_request.files[file] else: - self.logger.log_info(f"Ignoring unexpected file {file.filename}") + #self.logger.log_info(f"Ignoring unexpected file {file}") + pass return arg_dict def _check_required_args( diff --git a/madsci/madsci_module/madsci/module/rest_module.py b/madsci/madsci_module/madsci/module/rest_module.py 
index 95961b8..f184d44 100644 --- a/madsci/madsci_module/madsci/module/rest_module.py +++ b/madsci/madsci_module/madsci/module/rest_module.py @@ -130,7 +130,8 @@ def run_action( args = {} with tempfile.TemporaryDirectory() as temp_dir: # * Save the uploaded files to a temporary directory - for file in files: + for i in range(len(files)): + file = files[i] with (Path(temp_dir) / file.filename).open("wb") as f: shutil.copyfileobj(file.file, f) response = super().run_action( diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py index ec653fb..ab84276 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py @@ -6,13 +6,14 @@ from typing import Any, Callable, Dict, Union import redis -from pottery import InefficientAccessWarning, RedisDict, Redlock +from pottery import InefficientAccessWarning, RedisDict, Redlock, RedisList from pydantic import ValidationError from madsci.common.types.workcell_types import WorkcellDefinition from madsci.common.types.workflow_types import Workflow from madsci.common.types.base_types import new_ulid_str from madsci.workcell_manager.workcell_manager_types import WorkcellManagerDefinition +from madsci.common.types.node_types import Node, NodeDefinition class WorkcellRedisHandler: @@ -65,9 +66,14 @@ def _nodes(self) -> RedisDict: key=f"{self._workcell_prefix}:nodes", redis=self._redis_client ) @property + def _workflow_queue(self) -> RedisList: + return RedisList( + key=f"{self._workcell_prefix}:workflow_queue", redis=self._redis_client + ) + @property def _workflows(self) -> RedisDict: return RedisDict( - key=f"{self._workcell_prefix}:workflow_runs", redis=self._redis_client + key=f"{self._workcell_prefix}:workflows", redis=self._redis_client ) @@ -172,20 +178,20 @@ def get_workcell_id(self) -> str: return wc_id # *Workflow Methods - 
def get_workflow(self, run_id: Union[str, str]) -> Workflow: + def get_workflow(self, workflow_id: Union[str, str]) -> Workflow: """ Returns a workflow by ID """ - return Workflow.model_validate(self._workflows[str(run_id)]) + return Workflow.model_validate(self._workflows[str(workflow_id)]) def get_all_workflows(self) -> dict[str, Workflow]: """ Returns all workflow runs """ valid_workflows = {} - for run_id, workflow in self._workflows.to_dict().items(): + for workflow_id, workflow in self._workflows.to_dict().items(): try: - valid_workflows[str(run_id)] = Workflow.model_validate( + valid_workflows[str(workflow_id)] = Workflow.model_validate( workflow ) except ValidationError: @@ -200,21 +206,71 @@ def set_workflow(self, wf: Workflow) -> None: wf_dump = wf.model_dump(mode="json") else: wf_dump = Workflow.model_validate(wf).model_dump(mode="json") - self._workflows[str(wf_dump["run_id"])] = wf_dump + self._workflows[str(wf_dump["workflow_id"])] = wf_dump self.mark_state_changed() - def delete_workflow(self, run_id: Union[str, str]) -> None: + def delete_workflow(self, workflow_id: Union[str, str]) -> None: """ Deletes a workflow by ID """ - del self._workflows[str(run_id)] + del self._workflows[str(workflow_id)] self.mark_state_changed() def update_workflow( - self, run_id: str, func: Callable[..., Any], *args: Any, **kwargs: Any + self, workflow_id: str, func: Callable[..., Any], *args: Any, **kwargs: Any ) -> None: """ Updates the state of a workflow. 
""" - self.set_workflow(func(self.get_workflow(run_id), *args, **kwargs)) + self.set_workflow(func(self.get_workflow(workflow_id), *args, **kwargs)) + + def get_node(self, node_name: str) -> Node: + """ + Returns a node by name + """ + return Node.model_validate(self._nodes[node_name]) + + def get_all_nodes(self) -> Dict[str, Node]: + """ + Returns all nodes + """ + valid_nodes = {} + for node_name, node in self._nodes.to_dict().items(): + try: + valid_nodes[str(node_name)] = Node.model_validate(node) + except ValidationError: + continue + return valid_nodes + def set_node( + self, node_name: str, node: Union[Node, NodeDefinition, Dict[str, Any]] + ) -> None: + """ + Sets a node by name + """ + if isinstance(node, Node): + node_dump = node.model_dump(mode="json") + elif isinstance(node, NodeDefinition): + node_dump = Node.model_validate( + node, from_attributes=True + ).model_dump(mode="json") + else: + node_dump = Node.model_validate(node).model_dump(mode="json") + self._nodes[node_name] = node_dump + self.mark_state_changed() + def delete_node(self, node_name: str) -> None: + """ + Deletes a node by name + """ + del self._nodes[node_name] + self.mark_state_changed() + + def update_node( + self, node_name: str, func: Callable[..., Any], *args: Any, **kwargs: Any + ) -> None: + """ + Updates the state of a node. 
+ """ + self.set_node( + node_name, func(self.get_node(node_name), *args, **kwargs) + ) diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/__init__.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/__init__.py index 52e9b73..3199fce 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/__init__.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/__init__.py @@ -1 +1 @@ -"""MADSci Workcell Manager.""" +"""MADSci Workcell Manager Schedulers.""" diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler deleted file mode 100644 index 078c5d9..0000000 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler +++ /dev/null @@ -1,34 +0,0 @@ -from scheduler import Scheduler -class DefaultScheduler(Scheduler): - - def run_iteration(self): - for run_id, wf in self.state_handler.get_all_workflow_runs().items(): - if wf.status == WorkflowStatus.NEW: - wf.status = WorkflowStatus.QUEUED - print( - f"Processed new workflow: {wf.name} with run_id: {run_id}" - ) - #send_event(WorkflowQueuedEvent.from_wf_run(wf_run=wf_run)) - self.state_handler.set_workflow_run(wf) - elif wf.status in [ - WorkflowStatus.QUEUED, - WorkflowStatus.IN_PROGRESS, - ]: - step = wf.steps[wf.step_index] - if check_step(wf.experiment_id, run_id, step): - module = find_step_module( - self.state_handler.get_workcell(), step.module - ) - - #if wf_run.status == WorkflowStatus.QUEUED: - #send_event(WorkflowStartEvent.from_wf_run(wf_run=wf_run)) - wf.status = WorkflowStatus.RUNNING - print( - f"Starting step {wf.name}.{step.name} for run: {run_id}" - ) - if wf.step_index == 0: - wf.start_time = datetime.now() - self.state_handler.set_workflow_run(wf) - run_step(wf_run=wf, module=module) - - diff --git 
a/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler.py new file mode 100644 index 0000000..6c99cd3 --- /dev/null +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler.py @@ -0,0 +1,108 @@ +from schedulers.scheduler import AbstractScheduler +from madsci.common.types.workflow_types import WorkflowStatus +from madsci.common.types.node_types import Node +from madsci.common.types.step_types import Step +from madsci.common.utils import threaded_daemon +from madsci.client.node.abstract_node_client import AbstractNodeClient +from workcell_utils import find_node_client +from redis_handler import WorkcellRedisHandler +from madsci.common.types.action_types import ActionRequest, ActionStatus, ActionResult +from datetime import datetime +from typing import Optional + + +class Scheduler(AbstractScheduler): + + def run_iteration(self): + workflows = sorted(self.state_handler.get_all_workflows().values(), key=lambda item: item.scheduler_metadata.submitted_time) + for wf in workflows: + workflow_id = wf.workflow_id + if wf.scheduler_metadata.status == WorkflowStatus.NEW: + wf.scheduler_metadata.status = WorkflowStatus.QUEUED + print( + f"Processed new workflow: {wf.name} with id: {workflow_id}" + ) + #send_event(WorkflowQueuedEvent.from_wf_run(wf_run=wf_run)) + self.state_handler.set_workflow(wf) + elif wf.scheduler_metadata.status in [ + WorkflowStatus.QUEUED, + WorkflowStatus.IN_PROGRESS, + ]: + step = wf.steps[wf.scheduler_metadata.step_index] + if self.check_step(step): + + #if wf_run.status == WorkflowStatus.QUEUED: + #send_event(WorkflowStartEvent.from_wf_run(wf_run=wf_run)) + wf.scheduler_metadata.status = WorkflowStatus.RUNNING + print( + f"Starting step {wf.name}.{step.name} for workflow: {workflow_id}" + ) + if wf.scheduler_metadata.step_index == 0: + wf.scheduler_metadata.start_time = datetime.now() + 
self.state_handler.set_workflow(wf) + self.run_step(workflow_id, step) + def check_step(self, step: Step): + return self.resource_checks(step) and self.node_checks(step) + + def resource_checks(self, step: Step): + return True + + def node_checks(self, step: Step): + node = self.state_handler.get_node(step.node) + if node.status.ready: + return True + return False + + def retry_action(self, node: Node, client: AbstractNodeClient, response: Optional[ActionResult] = None): + if node.info.capabilities.get_action_result: + + while response is None or response.status not in ["not_ready", "succeeded", "failed"]: + try: + response = client.get_action_result(request.action_id) + time.sleep(5) + except Exception: + time.sleep(5) + return response + return response + + @threaded_daemon + def run_step(self, workflow_id: str, step: Step): + node = self.state_handler.get_node(step.node) + client = find_node_client(node.node_url) + try: + request = ActionRequest(action_name=step.action, args=step.args, files=step.files) + response = client.send_action(request) + except Exception: + response = self.retry_action(node, client) + response = self.retry_action(node, client, response) + with self.state_handler.wc_state_lock(): + wf = self.state_handler.get_workflow(workflow_id) + if response.status in ["succeeded", "failed"]: + wf.steps[wf.scheduler_metadata.step_index].status = response.status + wf.steps[wf.scheduler_metadata.step_index].results[response.action_id] = response + if response.status == "succeeded": + new_index = wf.scheduler_metadata.step_index + 1 + if new_index == len(wf.flowdef): + wf.scheduler_metadata.status = WorkflowStatus.COMPLETED + else: + wf.scheduler_metadata.step_index = new_index + wf.scheduler_metadata.status = WorkflowStatus.QUEUED + if response.status == "failed": + wf.scheduler_metadata.status = WorkflowStatus.FAILED + #print(self.state_handler.get_all_workflows()) + #print(wf) + self.state_handler.set_workflow(wf) + 
#print(self.state_handler.get_all_workflows()) + + + + + + + + + + + + + diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/scheduler.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/scheduler.py index e4f0d24..c5ebd8b 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/scheduler.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/scheduler.py @@ -1,59 +1,17 @@ from madsci.workcell_manager.redis_handler import WorkcellRedisHandler -from madsci.workcell_manager.types import WorkcellManagerDefinition -from madsci.common.types.workflow_types import WorkflowStatus +from madsci.workcell_manager.workcell_manager_types import WorkcellManagerDefinition from madsci.common.types.event_types import Event -from madsci.common.utils import threaded_task -import time -import datetime def send_event(test: Event): pass -class Scheduler: - def __init__(self, workcell_manager_definition: WorkcellManagerDefinition): - self.state_handler = WorkcellRedisHandler(workcell_manager_definition) +class AbstractScheduler: + def __init__(self, workcell_manager_definition: WorkcellManagerDefinition, state_handler: WorkcellRedisHandler): + self.state_handler = state_handler self.workcell_manager_definition = workcell_manager_definition self.running = True def run_iteration(self): pass - @threaded_task - def start(self): - while self.running is True: - self.run_iteration() - time.sleep(self.workcell_manager_definition.plugin_config.scheduler_interval) - -class DefaultScheduler(Scheduler): - - def run_iteration(self): - for run_id, wf_run in self.state_handler.get_all_workflow_runs().items(): - if wf_run.status == WorkflowStatus.NEW: - wf_run.status = WorkflowStatus.QUEUED - print( - f"Processed new workflow: {wf_run.name} with run_id: {run_id}" - ) - #send_event(WorkflowQueuedEvent.from_wf_run(wf_run=wf_run)) - self.state_handler.set_workflow_run(wf_run) - elif wf_run.status in [ - 
WorkflowStatus.QUEUED, - WorkflowStatus.IN_PROGRESS, - ]: - step = wf_run.steps[wf_run.step_index] - if check_step(wf_run.experiment_id, run_id, step): - module = find_step_module( - self.state_handler.get_workcell(), step.module - ) - - #if wf_run.status == WorkflowStatus.QUEUED: - #send_event(WorkflowStartEvent.from_wf_run(wf_run=wf_run)) - wf_run.status = WorkflowStatus.RUNNING - print( - f"Starting step {wf_run.name}.{step.name} for run: {run_id}" - ) - if wf_run.step_index == 0: - wf_run.start_time = datetime.now() - self.state_handler.set_workflow_run(wf_run) - run_step(wf_run=wf_run, module=module) - - + diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_engine.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_engine.py index 2f98d55..705b3ff 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_engine.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_engine.py @@ -3,16 +3,17 @@ """ import time +import importlib import traceback import requests import importlib #from schedulers.default_scheduler import DefaultScheduler -from workcell_utils import initialize_state +from workcell_utils import initialize_workcell, update_active_nodes from workcell_manager_types import WorkcellManagerDefinition from redis_handler import WorkcellRedisHandler from workflow_utils import cancel_active_workflows - +from madsci.common.utils import threaded_daemon class Engine: """ Handles scheduling workflows and executing steps on the workcell. 
@@ -24,13 +25,50 @@ def __init__(self, workcell_manager_definition: WorkcellManagerDefinition, state state_manager.clear_state( clear_workflows=workcell_manager_definition.plugin_config.clear_workflows ) + self.definition = workcell_manager_definition + self.state_manager = state_manager cancel_active_workflows(state_manager) - #self.scheduler = DefaultScheduler() + scheduler_module = importlib.import_module(self.definition.plugin_config.scheduler) + self.scheduler = scheduler_module.Scheduler(self.definition, self.state_manager) with state_manager.wc_state_lock(): - initialize_state(state_manager) - #time.sleep(workcell_manager_definition.plugin_config.cold_start_delay) + initialize_workcell(state_manager) + time.sleep(workcell_manager_definition.plugin_config.cold_start_delay) print("Engine initialized, waiting for workflows...") #send_event(WorkcellStartEvent(workcell=state_manager.get_workcell())) - \ No newline at end of file + def spin(self) -> None: + """ + Continuously loop, updating module states every Config.update_interval seconds. + If the state of the workcell has changed, update the active modules and run the scheduler. 
+ """ + update_active_nodes(self.state_manager) + node_tick = time.time() + scheduler_tick = time.time() + heartbeat = time.time() + while True and not self.state_manager.shutdown: + try: + if time.time() - heartbeat > 2: + heartbeat = time.time() + print(f"Heartbeat: {time.time()}") + if ( + time.time() - node_tick > self.definition.plugin_config.node_update_interval + or self.state_manager.has_state_changed() + ): + if not self.state_manager.paused: + update_active_nodes(self.state_manager) + node_tick = time.time() + if time.time() - scheduler_tick > self.definition.plugin_config.scheduler_update_interval: + self.scheduler.run_iteration() + scheduler_tick = time.time() + except Exception: + traceback.print_exc() + print( + f"Error in engine loop, waiting {self.definition.plugin_config.node_update_interval} seconds before trying again." + ) + time.sleep(self.definition.plugin_config.node_update_interval) + + @threaded_daemon + def start_engine_thread(self) -> None: + """Spins the engine in its own thread""" + self.spin() diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_manager_types.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_manager_types.py index e140086..56fd3ab 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_manager_types.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_manager_types.py @@ -21,3 +21,4 @@ class WorkcellManagerDefinition(ManagerDefinition): description="The configuration for the workcell manager plugin.", ) + diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py index 5cfdf38..ccedcea 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py @@ -9,11 +9,13 @@ ) from madsci.workcell_manager.workflow_utils import 
create_workflow, save_workflow_files +from madsci.workcell_manager.workcell_utils import find_node_client from workcell_engine import Engine from typing import Annotated, Optional from madsci.common.types.workcell_types import WorkcellDefinition from madsci.common.types.workflow_types import WorkflowDefinition, Workflow +from madsci.common.types.node_types import Node import argparse import json import traceback @@ -29,7 +31,8 @@ async def lifespan(app: FastAPI) -> None: app.state.state_handler=WorkcellRedisHandler(workcell_manager_definition) app.state.state_handler.set_workcell(workcell) - Engine(workcell_manager_definition, app.state.state_handler) + engine = Engine(workcell_manager_definition, app.state.state_handler) + engine.start_engine_thread() yield app = FastAPI(lifespan=lifespan) @@ -43,6 +46,39 @@ def get_workcell() -> WorkcellDefinition: """Get information about the resource manager.""" return app.state.state_handler.get_workcell() +@app.get("/nodes") +def get_nodes() -> dict[str, Node]: + """Get information about the resource manager.""" + return app.state.state_handler.get_all_nodes() + +@app.get("/admin/{command}") +def send_admin_command(command: str) -> list: + """Get information about the resource manager.""" + responses = [] + for node in app.state.state_handler.get_all_nodes().values(): + if command in node.info.capabilities.admin_commands: + client = find_node_client(node.node_url) + response = client.send_admin_command(command) + responses.append(response) + return responses + +@app.get("/admin/{command}/{node}") +def send_admin_command_to_node(command: str, node: str) -> list: + """Get information about the resource manager.""" + responses = [] + node = app.state.state_handler.get_node(node) + if command in node.info.capabilities.admin_commands: + client = find_node_client(node.node_url) + response = client.send_admin_command(command) + responses.append(response) + return responses + + +@app.get("/workflows") +def get_workflows() -> 
dict[str, Workflow]: + """Get information about the resource manager.""" + return app.state.state_handler.get_all_workflows() + @app.post("/start_workflow") async def start_workflow( workflow: Annotated[str, Form()], @@ -95,6 +131,7 @@ async def start_workflow( workcell=workcell, experiment_id=experiment_id, parameters=parameters, + state_manager=app.state.state_handler ) if not validate_only: diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_utils.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_utils.py index 512c915..9170f4c 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_utils.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_utils.py @@ -3,39 +3,34 @@ from madsci.common.types.workcell_types import WorkcellDefinition from madsci.common.types.node_types import NodeStatus, Node, NodeDefinition from madsci.client.node import AbstractNodeClient, NODE_CLIENT_MAP +import concurrent +import traceback +import warnings -def initialize_state(state_manager: WorkcellRedisHandler, workcell=None) -> None: +def initialize_workcell(state_manager: WorkcellRedisHandler, workcell=None) -> None: """ Initializes the state of the workcell from the workcell definition. 
""" if not workcell: workcell = state_manager.get_workcell() - initialize_workcell_nodes(workcell) + initialize_workcell_nodes(workcell, state_manager) initialize_workcell_resources(workcell) -def initialize_workcell_nodes(workcell): - for value in workcell.nodes.values(): +def initialize_workcell_nodes(workcell: WorkcellDefinition, state_manager: WorkcellRedisHandler): + for key, value in workcell.nodes.items(): if type(value) is NodeDefinition: - url = value.url - elif type(value) is AnyUrl: - url = value - elif type(value) is str: - url = AnyUrl(value) - update_node_info(url, workcell) - update_node_status(url, workcell) + node = Node(node_url=value.node_url) + elif type(value) is AnyUrl or type(value) is str: + node = Node(node_url=AnyUrl(value)) + state_manager.set_node(key, node) def initialize_workcell_resources(workcell): pass -def update_node_info(url: AnyUrl, workcell: WorkcellDefinition): - client = find_node_client(url) - print(client.get_info()) -def update_node_status(url: AnyUrl, workcell: WorkcellDefinition): - client = find_node_client(url) - print(client.get_status()) + def find_node_client(url: str) -> AbstractNodeClient: """finds the right client for the node url provided""" @@ -46,3 +41,35 @@ def find_node_client(url: str) -> AbstractNodeClient: if client.validate_url(url): return client(url) return None + +def update_active_nodes(state_manager: WorkcellRedisHandler) -> None: + """Update all active nodes in the workcell.""" + with concurrent.futures.ThreadPoolExecutor() as executor: + node_futures = [] + for node_name, node in state_manager.get_all_nodes().items(): + node_future = executor.submit(update_node, node_name, node, state_manager) + node_futures.append(node_future) + + # Wait for all node updates to complete + concurrent.futures.wait(node_futures) + + +def update_node(node_name: str, node: Node, state_manager: WorkcellRedisHandler) -> None: + """Update a single node's state and about information.""" + try: + old_status = node.status + 
old_info = node.info + client = find_node_client(node.node_url) + node.status = client.get_status() + node.info = client.get_info() + node.state = client.get_state() + if old_status != node.status or old_info != node.info: + with state_manager.wc_state_lock(): + state_manager.set_node(node_name, node) + except Exception: + warnings.warn( + message=f"Unable to update node {node_name}", + category=UserWarning, + stacklevel=1, + ) + diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py index ea132d3..5e14fc2 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py @@ -9,7 +9,7 @@ import re import copy from pathlib import Path - +from datetime import datetime def validate_node_names(workflow: Workflow, workcell: WorkcellDefinition) -> None: """ @@ -23,13 +23,48 @@ def replace_positions(workcell: WorkcellDefinition, step: Step): """Allow the user to put location names instead of """ pass -def validate_step(step: Step) -> tuple[bool, str]: - """Check if a step is valid based on the node's info""" - return (True, "") +def validate_step(step: Step, state_manager: WorkcellRedisHandler) -> tuple[bool, str]: + """Check if a step is valid based on the module's about""" + if step.node in state_manager.get_all_nodes(): + node = state_manager.get_node(step.node) + info = node.info + if info is None: + return ( + True, + f"Node {step.node} didn't return proper about information, skipping validation", + ) + if step.action in info.actions: + action = info.actions[step.action] + for action_arg in action.args.values(): + if action_arg.name not in step.args and action_arg.required: + return ( + False, + f"Step '{step.name}': Node {step.node}'s action, '{step.action}', is missing arg '{action_arg.name}'", + ) + # TODO: Action arg type validation goes here + for action_file in 
action.files: + if action_file.name not in step.files and action_file.required: + return ( + False, + f"Step '{step.name}': Node {step.node}'s action, '{step.action}', is missing file '{action_file.name}'", + ) + return True, f"Step '{step.name}': Validated successfully" + + return ( + False, + f"Step '{step.name}': Node {step.node} has no action '{step.action}'", + ) + else: + return ( + False, + f"Step '{step.name}': Node {step.node} is not defined in workcell", + ) + def create_workflow( workflow_def: WorkflowDefinition, workcell: WorkcellDefinition, + state_manager: WorkcellRedisHandler, experiment_id: Optional[str] = None, parameters: Optional[dict[str, Any]] = None, simulate: bool = False, @@ -69,18 +104,17 @@ def create_workflow( } ) wf = Workflow(**wf_dict) - steps = [] for step in workflow_def.flowdef: replace_positions(workcell, step) - valid, validation_string = validate_step(step) + valid, validation_string = validate_step(step, state_manager=state_manager) print(validation_string) if not valid: raise ValueError(validation_string) steps.append(step) wf.steps = steps - + wf.scheduler_metadata.submitted_time = datetime.now() return wf def save_workflow_files(working_directory: str, workflow: Workflow, files: list[UploadFile]) -> Workflow: @@ -88,7 +122,7 @@ def save_workflow_files(working_directory: str, workflow: Workflow, files: list[ and updates the step files to point to the new location""" get_workflow_inputs_directory( - workflow_run_id=workflow.run_id, + workflow_id=workflow.workflow_id, working_directory=working_directory ).mkdir(parents=True, exist_ok=True) if files: @@ -96,7 +130,7 @@ def save_workflow_files(working_directory: str, workflow: Workflow, files: list[ file_path = ( get_workflow_inputs_directory( working_directory=working_directory, - workflow_run_id=workflow.run_id, + workflow_id=workflow.workflow_id, ) / file.filename ) @@ -109,16 +143,16 @@ def save_workflow_files(working_directory: str, workflow: Workflow, files: list[ 
print(f"{step_file_key}: {file_path} ({step_file_path})") return workflow -def get_workflow_inputs_directory(workflow_run_id: str = None, working_directory: str = None) -> Path: +def get_workflow_inputs_directory(workflow_id: str = None, working_directory: str = None) -> Path: """returns a directory name for the workflows inputs""" - return Path(working_directory) / "Workflows" / workflow_run_id / "Inputs" + return Path(working_directory) / "Workflows" / workflow_id / "Inputs" def cancel_workflow(wf: Workflow, state_manager: WorkcellRedisHandler) -> None: """Cancels the workflow run""" wf.scheduler_metadata.status = WorkflowStatus.CANCELLED with state_manager.wc_state_lock(): - state_manager.set_workflow_run(wf) + state_manager.set_workflow(wf) return wf @@ -130,4 +164,4 @@ def cancel_active_workflows(state_manager: WorkcellRedisHandler) -> None: WorkflowStatus.QUEUED, WorkflowStatus.IN_PROGRESS, ]: - cancel_workflow(wf) + cancel_workflow(wf, state_manager=state_manager) diff --git a/tests/example/workcells/test_workcell.workcell.yaml b/tests/example/workcells/test_workcell.workcell.yaml index 12041e3..e9cefc4 100644 --- a/tests/example/workcells/test_workcell.workcell.yaml +++ b/tests/example/workcells/test_workcell.workcell.yaml @@ -7,7 +7,6 @@ config: auto_start: true workcell_directory: "/.MADSci/Workcell" -nodes: { +nodes: "liquid_handler": "http://localhost:2000" - -} + diff --git a/tests/example/workflows/test_workflow.workflow.yaml b/tests/example/workflows/test_workflow.workflow.yaml index 9e7fe23..23d6a0a 100644 --- a/tests/example/workflows/test_workflow.workflow.yaml +++ b/tests/example/workflows/test_workflow.workflow.yaml @@ -8,10 +8,17 @@ parameters: default: 1.5 flowdef: - name: Get plate to thingy - node: transfer - action: transfer + node: "liquid_handler" + action: run_command args: - target: thingy + command: thingy + comment: Get a new plate + files: {"protocol": "/workspaces/MADSci/tests/example/protocols/protocol.txt"} + - name: Get plate to 
thingy + node: liquid_handler + action: run_command + args: + command: thingy comment: Get a new plate files: {"protocol": "/workspaces/MADSci/tests/example/protocols/protocol.txt"} From 421789fc648844a9ec64c915f53a7f8aefd807f4 Mon Sep 17 00:00:00 2001 From: root Date: Mon, 13 Jan 2025 18:53:35 +0000 Subject: [PATCH 4/5] a bunch of stuff --- .../madsci/client/node/rest_node_client.py | 1 + .../madsci/client/workcell/__init__.py | 9 + .../madsci/client/workcell/workcell_client.py | 288 ++++++++++++++++++ .../madsci/client/workflow/__init__.py | 9 - .../madsci/client/workflow/workflow_client.py | 146 --------- .../madsci_common/madsci/common/exceptions.py | 16 + .../madsci/common/types/node_types.py | 12 + .../madsci/common/types/workcell_types.py | 6 +- .../madsci/common/types/workflow_types.py | 53 ++-- .../madsci/tests/workflow_tests.py | 12 +- .../madsci/workcell_manager/redis_handler.py | 10 + .../schedulers/default_scheduler.py | 86 +----- .../workcell_manager/workcell_engine.py | 81 ++++- .../workcell_manager/workcell_server.py | 147 ++++++++- .../madsci/workcell_manager/workflow_utils.py | 22 +- .../workcells/test_workcell.workcell.yaml | 38 ++- 16 files changed, 645 insertions(+), 291 deletions(-) create mode 100644 madsci/madsci_client/madsci/client/workcell/__init__.py create mode 100644 madsci/madsci_client/madsci/client/workcell/workcell_client.py delete mode 100644 madsci/madsci_client/madsci/client/workflow/__init__.py delete mode 100644 madsci/madsci_client/madsci/client/workflow/workflow_client.py diff --git a/madsci/madsci_client/madsci/client/node/rest_node_client.py b/madsci/madsci_client/madsci/client/node/rest_node_client.py index 75eb93f..c854c14 100644 --- a/madsci/madsci_client/madsci/client/node/rest_node_client.py +++ b/madsci/madsci_client/madsci/client/node/rest_node_client.py @@ -74,6 +74,7 @@ def send_action(self, action_request: ActionRequest) -> ActionResult: for file in files: file[1][1].close() if not rest_response.ok: + 
print(rest_response) rest_response.raise_for_status() return ActionResult.model_validate(rest_response.json()) diff --git a/madsci/madsci_client/madsci/client/workcell/__init__.py b/madsci/madsci_client/madsci/client/workcell/__init__.py new file mode 100644 index 0000000..ce335cc --- /dev/null +++ b/madsci/madsci_client/madsci/client/workcell/__init__.py @@ -0,0 +1,9 @@ +"""MADSci workflow client implementations.""" + +from madsci.client.workcell.workcell_client import WorkcellClient + + + +__all__ = [ + "WorkcellClient", +] diff --git a/madsci/madsci_client/madsci/client/workcell/workcell_client.py b/madsci/madsci_client/madsci/client/workcell/workcell_client.py new file mode 100644 index 0000000..64316e6 --- /dev/null +++ b/madsci/madsci_client/madsci/client/workcell/workcell_client.py @@ -0,0 +1,288 @@ +from madsci.common.types.workflow_types import Workflow, WorkflowDefinition, WorkflowStatus +from madsci.common.exceptions import WorkflowFailedException, WorkflowCanceledException +from typing import Any, Optional + +from pathlib import Path +import re +import copy +import requests +import json +import time +from datetime import datetime + +class WorkcellClient: + """a client for running workflows""" + def __init__(self, workcell_manager_url: str, working_directory: str="~/.MADsci/temp", ownership_info: Optional[OwnershipInfo] = None) -> "WorkflowClient": + """initialize the client""" + self.url = workcell_manager_url + self.working_directory = Path(working_directory) + self.ownership_info = ownership_info + + def query_workflow(self, workflow_id: str) -> Workflow: + """Checks on a workflow run using the id given + + Parameters + ---------- + + workflow_id : str + The id returned by the start_workflow function for this run + + Returns + ------- + + response: Dict + The JSON portion of the response from the server""" + + url = f"{self.url}/workflows/{workflow_id}" + response = requests.get(url) + + if response.ok: + return Workflow(**response.json()) + else: + 
response.raise_for_status()
+
+    def start_workflow(self,
+        workflow: str,
+        parameters: dict,
+        validate_only: bool = False,
+        blocking: bool = True,
+        raise_on_failed: bool = True,
+        raise_on_cancelled: bool = True) -> Workflow:
+        """send a workflow to the workcell manager"""
+        workflow = WorkflowDefinition.from_yaml(workflow)
+        WorkflowDefinition.model_validate(workflow)
+        insert_parameter_values(workflow=workflow, parameters=parameters)
+        files = self._extract_files_from_workflow(workflow)
+        url = self.url + "/workflows/start"
+        response = requests.post(
+            url,
+            data={
+                "workflow": workflow.model_dump_json(),
+                "parameters": json.dumps(parameters),
+                "validate_only": validate_only
+            },
+            files={
+                ("files", (str(Path(path).name), Path.open(Path(path), "rb")))
+                for _, path in files.items()
+            },
+        )
+        if not blocking:
+            return Workflow(**response.json())
+        else:
+            return self.await_workflow(response.json()["workflow_id"], raise_on_cancelled=raise_on_cancelled, raise_on_failed=raise_on_failed)
+
+    def _extract_files_from_workflow(
+        self, workflow: WorkflowDefinition
+    ) -> dict[str, Any]:
+        """
+        Returns a dictionary of files from a workflow
+        """
+        files = {}
+        for step in workflow.flowdef:
+            if step.files:
+                for file, path in step.files.items():
+                    unique_filename = f"{step.step_id}_{file}"
+                    files[unique_filename] = path
+                    if not Path(files[unique_filename]).is_absolute():
+                        files[unique_filename] = (
+                            self.working_directory / files[unique_filename]
+                        )
+                    step.files[file] = Path(files[unique_filename]).name
+        return files
+    def run_workflows_in_order(self, workflows: list[str], parameters: list[dict[str, Any]]):
+        for i in range(len(workflows)):
+            self.start_workflow(workflows[i], parameters[i], blocking=True)
+    def run_workflow_batch(self, workflows: list[str], parameters: list[dict[str, Any]]):
+        id_list = []
+        for i in range(len(workflows)):
+            response = self.start_workflow(workflows[i], parameters[i], blocking=False)
+            id_list.append(response.workflow_id)
+            finished = False
+            while finished == False:
+                flag = True
+                wfs = []
+                for id in id_list:
+                    wf = self.query_workflow(id)
+                    flag = flag and (wf.status in ["completed", "failed"])
+                    wfs.append(wf)
+                finished = flag
+            return wfs
+    def retry_workflow(self, workflow_id: str, index: int = -1):
+        url = f"{self.url}/workflows/retry"
+        response = requests.post(url,
+            params={
+                "workflow_id": workflow_id,
+                "index": index,
+            })
+        return response.json()
+    def resubmit_workflow(self, workflow_id: str,
+        blocking: bool = True,
+        raise_on_failed: bool = True,
+        raise_on_cancelled: bool = True):
+        url = f"{self.url}/workflows/resubmit/{workflow_id}"
+        response = requests.get(url)
+        new_wf = Workflow(**response.json())
+        if blocking:
+            return self.await_workflow(new_wf.workflow_id, raise_on_failed=raise_on_failed, raise_on_cancelled=raise_on_cancelled)
+        else:
+            return new_wf
+
+    def await_workflow(self, workflow_id: str,
+        raise_on_failed: bool = True,
+        raise_on_cancelled: bool = True):
+        prior_status = None
+        prior_index = None
+        while True:
+            wf = self.query_workflow(workflow_id)
+            status = wf.status
+            step_index = wf.step_index
+            if prior_status != status or prior_index != step_index:
+                if step_index < len(wf.steps):
+                    step_name = wf.steps[step_index].name
+                else:
+                    step_name = "Workflow End"
+                print()
+                print(
+                    f"{wf.name} [{step_index}]: {step_name} ({wf.status})",
+                    end="",
+                    flush=True,
+                )
+            else:
+                print(".", end="", flush=True)
+            time.sleep(1)
+            if wf.status in [
+                WorkflowStatus.COMPLETED,
+                WorkflowStatus.FAILED,
+                WorkflowStatus.CANCELLED,
+            ]:
+                break
+            prior_status = status
+            prior_index = step_index
+        if wf.status == WorkflowStatus.FAILED and raise_on_failed:
+            raise WorkflowFailedException(
+                f"Workflow {wf.name} ({wf.workflow_id}) failed on step {wf.step_index}: '{wf.steps[wf.step_index].name}'."
+            )
+        if wf.status == WorkflowStatus.CANCELLED and raise_on_cancelled:
+            raise WorkflowCanceledException(
+                f"Workflow {wf.name} ({wf.workflow_id}) was cancelled on step {wf.step_index}: '{wf.steps[wf.step_index].name}'."
+            )
+        return wf
+    def get_all_nodes(self):
+        url = f"{self.url}/nodes"
+        response = requests.get(url)
+        return response.json()
+    def get_node(self, node_name):
+        url = f"{self.url}/nodes/{node_name}"
+        response = requests.get(url)
+        return response.json()
+    def add_node(self, node_name: str, node_url: str, node_description: str="A Node", permanent: bool=False):
+        url = f"{self.url}/nodes/add_node"
+        response = requests.post(url,
+            params={
+                "node_name": node_name,
+                "node_url": node_url,
+                "node_description": node_description,
+                "permanent": permanent
+            })
+        return response.json()
+    def reserve_node(self, node_name: str, duration: datetime):
+        url = f"{self.url}/nodes/reserve"
+        response = requests.post(url,
+            params={
+                "node_name": node_name,
+                "duration": str(duration),
+                "ownership_info": self.ownership_info
+            })
+        return response.json()
+    def get_all_workflows(self):
+        url = f"{self.url}/workflows"
+        response = requests.get(url)
+        return response.json()
+def insert_parameter_values(workflow: WorkflowDefinition, parameters: dict[str, Any]) -> Workflow:
+    """Replace the parameter strings in the workflow with the provided values"""
+    for param in workflow.parameters:
+        if param.name not in parameters:
+            if param.default:
+                parameters[param.name] = param.default
+            else:
+                raise ValueError(
+                    "Workflow parameter: "
+                    + param.name
+                    + " not provided, and no default value is defined."
+ ) + steps = [] + for step in workflow.flowdef: + for key, val in iter(step): + if type(val) is str: + setattr(step, key, value_substitution(val, parameters)) + + step.args = walk_and_replace(step.args, parameters) + steps.append(step) + workflow.flowdef = steps + +def walk_and_replace(args: dict[str, Any], input_parameters: dict[str, Any]) -> dict[str, Any]: + """Recursively walk the arguments and replace all parameters""" + new_args = copy.deepcopy(args) + for key, val in args.items(): + if type(val) is str: + new_args[key] = value_substitution(val, input_parameters) + elif type(args[key]) is dict: + new_args[key] = walk_and_replace(val, input_parameters) + if type(key) is str: + new_key = value_substitution(key, input_parameters) + new_args[new_key] = new_args[key] + if key is not new_key: + new_args.pop(key, None) + return new_args + + +def value_substitution(input_string: str, input_parameters: dict[str, Any]) -> str: + """Perform $-string substitution on input string, returns string with substituted values""" + # * Check if the string is a simple parameter reference + if type(input_string) is str and re.match(r"^\$[A-z0-9_\-]*$", input_string): + if input_string.strip("$") in input_parameters: + input_string = input_parameters[input_string.strip("$")] + else: + raise ValueError( + "Unknown parameter:" + + input_string + + ", please define it in the parameters section of the Workflow Definition." + ) + else: + # * Replace all parameter references contained in the string + working_string = input_string + for match in re.findall(r"((? 
"WorkflowClient": - """initialize the client""" - self.url = workcell_manager_url - self.working_directory = Path(working_directory) - - def send_workflow(self, workflow: str, parameters: dict, validate_only: bool = False) -> Workflow: - """send a workflow to the workcell manager""" - workflow = WorkflowDefinition.from_yaml(workflow) - WorkflowDefinition.model_validate(workflow) - insert_parameter_values(workflow=workflow, parameters=parameters) - files = self._extract_files_from_workflow(workflow) - url = self.url + "/start_workflow" - response = requests.post( - url, - data={ - "workflow": workflow.model_dump_json(), - "parameters": json.dumps(parameters), - "validate_only": validate_only - }, - files={ - ("files", (str(Path(path).name), Path.open(Path(path), "rb"))) - for _, path in files.items() - }, - ) - def _extract_files_from_workflow( - self, workflow: WorkflowDefinition - ) -> dict[str, Any]: - """ - Returns a dictionary of files from a workflow - """ - files = {} - for step in workflow.flowdef: - if step.files: - for file, path in step.files.items(): - # * Try to get the file from the payload, if applicable - unique_filename = f"{step.step_id}_{file}" - files[unique_filename] = path - if not Path(files[unique_filename]).is_absolute(): - files[unique_filename] = ( - self.working_directory / files[unique_filename] - ) - step.files[file] = Path(files[unique_filename]).name - return files - - -def insert_parameter_values(workflow: WorkflowDefinition, parameters: dict[str, Any]) -> Workflow: - """Replace the parameter strings in the workflow with the provided values""" - for param in workflow.parameters: - if param.name not in parameters: - if param.default: - parameters[param.name] = param.default - else: - raise ValueError( - "Workflow parameter: " - + param.name - + " not provided, and no default value is defined." 
- ) - steps = [] - for step in workflow.flowdef: - for key, val in iter(step): - if type(val) is str: - setattr(step, key, value_substitution(val, parameters)) - - step.args = walk_and_replace(step.args, parameters) - steps.append(step) - workflow.flowdef = steps - - -def walk_and_replace(args: dict[str, Any], input_parameters: dict[str, Any]) -> dict[str, Any]: - """Recursively walk the arguments and replace all parameters""" - new_args = copy.deepcopy(args) - for key, val in args.items(): - if type(val) is str: - new_args[key] = value_substitution(val, input_parameters) - elif type(args[key]) is dict: - new_args[key] = walk_and_replace(val, input_parameters) - if type(key) is str: - new_key = value_substitution(key, input_parameters) - new_args[new_key] = new_args[key] - if key is not new_key: - new_args.pop(key, None) - return new_args - - -def value_substitution(input_string: str, input_parameters: dict[str, Any]) -> str: - """Perform $-string substitution on input string, returns string with substituted values""" - # * Check if the string is a simple parameter reference - if type(input_string) is str and re.match(r"^\$[A-z0-9_\-]*$", input_string): - if input_string.strip("$") in input_parameters: - input_string = input_parameters[input_string.strip("$")] - else: - raise ValueError( - "Unknown parameter:" - + input_string - + ", please define it in the parameters section of the Workflow Definition." - ) - else: - # * Replace all parameter references contained in the string - working_string = input_string - for match in re.findall(r"((? 
str: return "; ".join(reasons) return "Node is ready" +class Reservation(BaseModel): + owned_by: OwnershipInfo + + started: datetime + class NodeSetConfigResponse(BaseModel): """Response from a Node Set Config Request""" diff --git a/madsci/madsci_common/madsci/common/types/workcell_types.py b/madsci/madsci_common/madsci/common/types/workcell_types.py index 44764e4..d9d4aea 100644 --- a/madsci/madsci_common/madsci/common/types/workcell_types.py +++ b/madsci/madsci_common/madsci/common/types/workcell_types.py @@ -80,14 +80,14 @@ class WorkcellConfig(BaseModel): description="The password for the redis server.", ) scheduler_update_interval: float = Field( - default=0.1, + default=2.0, title="Scheduler Update Interval", - description="The interval at which the scheduler runs, in seconds.", + description="The interval at which the scheduler runs, in seconds. Must be >= node_update_interval", ) node_update_interval: float = Field( default=1.0, title="Node Update Interval", - description="The interval at which the workcell queries its node's states, in seconds.", + description="The interval at which the workcell queries its node's states, in seconds.Must be <= scheduler_update_interval", ) auto_start: bool = Field( default=True, diff --git a/madsci/madsci_common/madsci/common/types/workflow_types.py b/madsci/madsci_common/madsci/common/types/workflow_types.py index ba092ce..0628922 100644 --- a/madsci/madsci_common/madsci/common/types/workflow_types.py +++ b/madsci/madsci_common/madsci/common/types/workflow_types.py @@ -9,35 +9,28 @@ class WorkflowStatus(str, Enum): """Status for a workflow run""" - - NEW = "new" - """Newly created workflow run, hasn't been queued yet""" QUEUED = "queued" """Workflow run is queued, hasn't started yet""" RUNNING = "running" """Workflow is currently running a step""" IN_PROGRESS = "in_progress" - """Workflow run has started, but is not actively running a step""" - PAUSED = "paused" - """Workflow run is paused""" + """Workflow has started, 
but is not actively running a step""" COMPLETED = "completed" - """Workflow run has completed""" + """Workflow has completed""" FAILED = "failed" - """Workflow run has failed""" + """Workflow has failed""" UNKNOWN = "unknown" - """Workflow run status is unknown""" + """Workflow status is unknown""" CANCELLED = "cancelled" - """Workflow run has been cancelled""" + """Workflow has been cancelled""" @property def is_active(self) -> bool: """Whether or not the workflow run is active""" return self in [ - WorkflowStatus.NEW, WorkflowStatus.QUEUED, WorkflowStatus.RUNNING, WorkflowStatus.IN_PROGRESS, - WorkflowStatus.PAUSED, ] @@ -88,21 +81,10 @@ def ensure_data_label_uniqueness(cls, v: Any) -> Any: class SchedulerMetadata(BaseModel): - status: WorkflowStatus = Field(default=WorkflowStatus.NEW) - """current status of the workflow""" - step_index: int = 0 - """Index of the current step""" - simulate: bool = False - """Whether or not this workflow is being simulated""" - submitted_time: Optional[datetime] = None - """Time workflow was submitted to the scheduler""" - start_time: Optional[datetime] = None - """Time the workflow started running""" - end_time: Optional[datetime] = None - """Time the workflow finished running""" - duration: Optional[timedelta] = None - """Duration of the workflow's run""" - + """Scheduler information""" + ready_to_run: bool = False + """whether or not the next step in the workflow is ready to run""" + priority: int = 0 class Workflow(WorkflowDefinition): @@ -119,6 +101,23 @@ class Workflow(WorkflowDefinition): """parameter values used inthis workflow""" experiment_id: Optional[str] = None """ID of the experiment this workflow is a part of""" + status: WorkflowStatus = Field(default=WorkflowStatus.QUEUED) + """current status of the workflow""" + step_index: int = 0 + """Index of the current step""" + simulate: bool = False + """Whether or not this workflow is being simulated""" + submitted_time: Optional[datetime] = None + """Time workflow was 
submitted to the scheduler""" + start_time: Optional[datetime] = None + """Time the workflow started running""" + end_time: Optional[datetime] = None + """Time the workflow finished running""" + duration: Optional[timedelta] = None + """Duration of the workflow's run""" + paused: Optional[bool] = False + """whether or not the workflow is paused""" + def get_step_by_name(self, name: str) -> Step: """Return the step object by its name""" diff --git a/madsci/madsci_workcell_manager/madsci/tests/workflow_tests.py b/madsci/madsci_workcell_manager/madsci/tests/workflow_tests.py index c684b2d..50e92c5 100644 --- a/madsci/madsci_workcell_manager/madsci/tests/workflow_tests.py +++ b/madsci/madsci_workcell_manager/madsci/tests/workflow_tests.py @@ -1,8 +1,14 @@ from madsci.common.types.workflow_types import WorkflowDefinition -from madsci.madsci_client.madsci.client.workflow.workflow_client import WorkflowClient +from madsci.madsci_client.madsci.client.workcell.workcell_client import WorkcellClient import requests from pathlib import Path -client = WorkflowClient("http://localhost:8013") -client.send_workflow(Path("../../../../tests/example/workflows/test_workflow.workflow.yaml").resolve(), {}) \ No newline at end of file +client = WorkcellClient("http://localhost:8013") + +print(client.get_node("liquid_handler")) +print(client.add_node("liquid_handler", "http://localhost:2000", permanent=True)) +wf = client.start_workflow(Path("../../../../tests/example/workflows/test_workflow.workflow.yaml").resolve(), {}) +print(wf.workflow_id) +client.resubmit_workflow(wf.workflow_id) +print(client.get_all_workflows()) \ No newline at end of file diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py index ab84276..db3a172 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py @@ 
-208,6 +208,16 @@ def set_workflow(self, wf: Workflow) -> None: wf_dump = Workflow.model_validate(wf).model_dump(mode="json") self._workflows[str(wf_dump["workflow_id"])] = wf_dump self.mark_state_changed() + + def set_workflow_quiet(self, wf: Workflow) -> None: + """ + Sets a workflow by ID + """ + if isinstance(wf, Workflow): + wf_dump = wf.model_dump(mode="json") + else: + wf_dump = Workflow.model_validate(wf).model_dump(mode="json") + self._workflows[str(wf_dump["workflow_id"])] = wf_dump def delete_workflow(self, workflow_id: Union[str, str]) -> None: """ diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler.py index 6c99cd3..270271d 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler.py @@ -1,12 +1,6 @@ from schedulers.scheduler import AbstractScheduler -from madsci.common.types.workflow_types import WorkflowStatus -from madsci.common.types.node_types import Node from madsci.common.types.step_types import Step -from madsci.common.utils import threaded_daemon -from madsci.client.node.abstract_node_client import AbstractNodeClient -from workcell_utils import find_node_client -from redis_handler import WorkcellRedisHandler -from madsci.common.types.action_types import ActionRequest, ActionStatus, ActionResult + from datetime import datetime from typing import Optional @@ -14,33 +8,14 @@ class Scheduler(AbstractScheduler): def run_iteration(self): - workflows = sorted(self.state_handler.get_all_workflows().values(), key=lambda item: item.scheduler_metadata.submitted_time) + priority = 0 + workflows = sorted(self.state_handler.get_all_workflows().values(), key=lambda item: item.submitted_time) for wf in workflows: - workflow_id = wf.workflow_id - if wf.scheduler_metadata.status == 
WorkflowStatus.NEW: - wf.scheduler_metadata.status = WorkflowStatus.QUEUED - print( - f"Processed new workflow: {wf.name} with id: {workflow_id}" - ) - #send_event(WorkflowQueuedEvent.from_wf_run(wf_run=wf_run)) - self.state_handler.set_workflow(wf) - elif wf.scheduler_metadata.status in [ - WorkflowStatus.QUEUED, - WorkflowStatus.IN_PROGRESS, - ]: - step = wf.steps[wf.scheduler_metadata.step_index] - if self.check_step(step): - - #if wf_run.status == WorkflowStatus.QUEUED: - #send_event(WorkflowStartEvent.from_wf_run(wf_run=wf_run)) - wf.scheduler_metadata.status = WorkflowStatus.RUNNING - print( - f"Starting step {wf.name}.{step.name} for workflow: {workflow_id}" - ) - if wf.scheduler_metadata.step_index == 0: - wf.scheduler_metadata.start_time = datetime.now() - self.state_handler.set_workflow(wf) - self.run_step(workflow_id, step) + step = wf.steps[wf.step_index] + wf.scheduler_metadata.ready_to_run = not(wf.paused) and wf.status in ["queued", "in_progress"] and self.check_step(step) + wf.scheduler_metadata.priority = priority + priority -= 1 + self.state_handler.set_workflow_quiet(wf) def check_step(self, step: Step): return self.resource_checks(step) and self.node_checks(step) @@ -49,52 +24,11 @@ def resource_checks(self, step: Step): def node_checks(self, step: Step): node = self.state_handler.get_node(step.node) - if node.status.ready: + if node is not None and node.status.ready: return True return False - def retry_action(self, node: Node, client: AbstractNodeClient, response: Optional[ActionResult] = None): - if node.info.capabilities.get_action_result: - - while response is None or response.status not in ["not_ready", "succeeded", "failed"]: - try: - response = client.get_action_result(request.action_id) - time.sleep(5) - except Exception: - time.sleep(5) - return response - return response - - @threaded_daemon - def run_step(self, workflow_id: str, step: Step): - node = self.state_handler.get_node(step.node) - client = find_node_client(node.node_url) - 
try: - request = ActionRequest(action_name=step.action, args=step.args, files=step.files) - response = client.send_action(request) - except Exception: - response = self.retry_action(node, client) - response = self.retry_action(node, client, response) - with self.state_handler.wc_state_lock(): - wf = self.state_handler.get_workflow(workflow_id) - if response.status in ["succeeded", "failed"]: - wf.steps[wf.scheduler_metadata.step_index].status = response.status - wf.steps[wf.scheduler_metadata.step_index].results[response.action_id] = response - if response.status == "succeeded": - new_index = wf.scheduler_metadata.step_index + 1 - if new_index == len(wf.flowdef): - wf.scheduler_metadata.status = WorkflowStatus.COMPLETED - else: - wf.scheduler_metadata.step_index = new_index - wf.scheduler_metadata.status = WorkflowStatus.QUEUED - if response.status == "failed": - wf.scheduler_metadata.status = WorkflowStatus.FAILED - #print(self.state_handler.get_all_workflows()) - #print(wf) - self.state_handler.set_workflow(wf) - #print(self.state_handler.get_all_workflows()) - - + diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_engine.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_engine.py index 705b3ff..437cabf 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_engine.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_engine.py @@ -8,12 +8,21 @@ import requests import importlib -#from schedulers.default_scheduler import DefaultScheduler +from madsci.common.types.node_types import Node from workcell_utils import initialize_workcell, update_active_nodes from workcell_manager_types import WorkcellManagerDefinition from redis_handler import WorkcellRedisHandler from workflow_utils import cancel_active_workflows from madsci.common.utils import threaded_daemon +from madsci.client.node.abstract_node_client import AbstractNodeClient +from workcell_utils import find_node_client +from 
redis_handler import WorkcellRedisHandler +from madsci.common.types.action_types import ActionRequest, ActionStatus, ActionResult +from madsci.common.types.workflow_types import WorkflowStatus +from madsci.common.types.step_types import Step +from datetime import datetime + +from typing import Optional class Engine: """ Handles scheduling workflows and executing steps on the workcell. @@ -59,8 +68,10 @@ def spin(self) -> None: update_active_nodes(self.state_manager) node_tick = time.time() if time.time() - scheduler_tick > self.definition.plugin_config.scheduler_update_interval: - self.scheduler.run_iteration() - scheduler_tick = time.time() + with self.state_manager.wc_state_lock(): + self.scheduler.run_iteration() + self.run_next_step() + scheduler_tick = time.time() except Exception: traceback.print_exc() print( @@ -72,3 +83,67 @@ def spin(self) -> None: def start_engine_thread(self) -> None: """Spins the engine in its own thread""" self.spin() + + def run_next_step(self): + workflows = self.state_manager.get_all_workflows() + ready_workflows = filter(lambda wf: wf.scheduler_metadata.ready_to_run, workflows.values()) + sorted_ready_workflows = sorted(ready_workflows, key=lambda wf: wf.scheduler_metadata.priority) + if len(sorted_ready_workflows) > 0: + next_wf = sorted_ready_workflows[0] + next_wf.status = WorkflowStatus.RUNNING + self.state_manager.set_workflow(next_wf) + self.run_step(next_wf.workflow_id, next_wf.steps[next_wf.step_index]) + + def retry_action(self, node: Node, client: AbstractNodeClient, request: ActionRequest, response: Optional[ActionResult] = None): + if node.info.capabilities.get_action_result: + + while response is None or response.status not in ["not_ready", "succeeded", "failed"]: + try: + response = client.get_action_result(request.action_id) + time.sleep(5) + except Exception: + time.sleep(5) + return response + return response + @threaded_daemon + def run_step(self, workflow_id: str, step: Step): + with 
self.state_manager.wc_state_lock(): + wf = self.state_manager.get_workflow(workflow_id) + wf.steps[wf.step_index].start_time = datetime.now() + if wf.step_index == 0: + wf.start_time = datetime.now() + self.state_manager.set_workflow(wf) + node = self.state_manager.get_node(step.node) + client = find_node_client(node.node_url) + try: + request = ActionRequest(action_name=step.action, args=step.args, files=step.files) + response = client.send_action(request) + except Exception: + response = self.retry_action(node, client, request) + response = self.retry_action(node, client, request, response) + if response is None: + response = request.failed() + with self.state_manager.wc_state_lock(): + wf = self.state_manager.get_workflow(workflow_id) + if response.status in ["succeeded", "failed"]: + wf.steps[wf.step_index].status = response.status + wf.steps[wf.step_index].results[response.action_id] = response + wf.steps[wf.step_index].end_time = datetime.now() + if response.status == "succeeded": + new_index = wf.step_index + 1 + if new_index == len(wf.flowdef): + wf.status = WorkflowStatus.COMPLETED + wf.end_time = datetime.now() + else: + wf.step_index = new_index + if wf.status == WorkflowStatus.RUNNING: + wf.status = WorkflowStatus.IN_PROGRESS + if response.status == "failed": + wf.status = WorkflowStatus.FAILED + wf.end_time = datetime.now() + #print(self.state_manager.get_all_workflows()) + #print(wf) + self.state_manager.set_workflow(wf) + #print(self.state_manager.get_all_workflows()) + + \ No newline at end of file diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py index ccedcea..7ea69e7 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py @@ -3,24 +3,26 @@ from fastapi import FastAPI, Form, HTTPException, UploadFile from fastapi.datastructures 
import State from redis_handler import WorkcellRedisHandler +from madsci.common.types.base_types import new_ulid_str +from madsci.common.types.auth_types import OwnershipInfo from madsci.workcell_manager.workcell_manager_types import ( WorkcellManagerDefinition, ) -from madsci.workcell_manager.workflow_utils import create_workflow, save_workflow_files +from madsci.workcell_manager.workflow_utils import create_workflow, save_workflow_files, copy_workflow_files from madsci.workcell_manager.workcell_utils import find_node_client + from workcell_engine import Engine -from typing import Annotated, Optional +from typing import Annotated, Optional, Union from madsci.common.types.workcell_types import WorkcellDefinition -from madsci.common.types.workflow_types import WorkflowDefinition, Workflow -from madsci.common.types.node_types import Node +from madsci.common.types.workflow_types import WorkflowDefinition, Workflow, WorkflowStatus +from madsci.common.types.node_types import Node, NodeDefinition import argparse import json import traceback - - +from datetime import datetime arg_parser = argparse.ArgumentParser() arg_parser.add_argument( "--workcell_file", @@ -51,6 +53,47 @@ def get_nodes() -> dict[str, Node]: """Get information about the resource manager.""" return app.state.state_handler.get_all_nodes() +@app.get("/nodes/{node_name}") +def get_node(node_name: str) -> Union[Node, str]: + """Get information about the resource manager.""" + try: + node = app.state.state_handler.get_node(node_name) + except Exception as e: + return "Node not found!" + return node + +@app.post("/nodes/add_node") +def add_node( + node_name: str, + node_url: str, + node_description: str = "A Node", + permanent: bool = False + ) -> Union[Node, str]: + """Get information about the resource manager.""" + if node_name in app.state.state_handler.get_all_nodes(): + return "Node name exists, node names must be unique!" 
+ node = Node(node_url=node_url) + app.state.state_handler.set_node(node_name, node) + if permanent: + workcell.nodes[node_name] = NodeDefinition(node_name=node_name, node_url=node_url, node_description=node_description) + workcell.to_yaml(workcell_file) + return app.state.state_handler.get_node(node_name) + +@app.post("/nodes/reserve") +def reserve_node( + node_name: str, + ownership_info: Optional[OwnershipInfo] = None, + duration: str, + ) -> Union[Node, str]: + """Get information about the resource manager.""" + node = app.state.state_handler.get_node(node_name) + node.reserved_by = ownership_info + app.state.state_handler.set_node(node_name, node) + + return app.state.state_handler.get_node(node_name) + + + @app.get("/admin/{command}") def send_admin_command(command: str) -> list: """Get information about the resource manager.""" @@ -75,11 +118,86 @@ def send_admin_command_to_node(command: str, node: str) -> list: @app.get("/workflows") -def get_workflows() -> dict[str, Workflow]: +def get_all_workflows() -> dict[str, Workflow]: """Get information about the resource manager.""" return app.state.state_handler.get_all_workflows() -@app.post("/start_workflow") +@app.get("/workflows/{workflow_id}") +def get_workflow(workflow_id: str) -> Workflow: + """Get information about the resource manager.""" + return app.state.state_handler.get_workflow(workflow_id) + +@app.get("/workflows/pause/{workflow_id}") +def pause_workflow(workflow_id: str) -> Workflow: + """Get information about the resource manager.""" + with app.state.state_handler.wc_state_lock(): + wf = app.state.state_handler.get_workflow(workflow_id) + if wf.status in ["running", "in_progress", "queued"]: + if wf.status == "running": + send_admin_command_to_node("pause", wf.steps[wf.step_index].node) + wf.steps[wf.step_index] = ActionStatus.PAUSED + wf.paused = True + app.state.state_handler.set_workflow(wf) + + return app.state.state_handler.get_workflow(workflow_id) + 
+@app.get("/workflows/resume/{workflow_id}") +def resume_workflow(workflow_id: str) -> Workflow: + """Get information about the resource manager.""" + with app.state.state_handler.wc_state_lock(): + wf = app.state.state_handler.get_workflow(workflow_id) + if wf.paused: + if wf.status == "running": + send_admin_command_to_node("resume", wf.steps[wf.step_index].node) + wf.steps[wf.step_index] = ActionStatus.RUNNING + wf.paused = False + app.state.state_handler.set_workflow(wf) + return app.state.state_handler.get_workflow(workflow_id) + +@app.get("/workflows/cancel/{workflow_id}") +def cancel_workflow(workflow_id: str) -> Workflow: + """Get information about the resource manager.""" + with app.state.state_handler.wc_state_lock(): + wf = app.state.state_handler.get_workflow(workflow_id) + if wf.status == "running": + send_admin_command_to_node("stop", wf.steps[wf.step_index].node) + wf.steps[wf.step_index] = ActionStatus.CANCELLED + wf.status = WorkflowStatus.CANCELLED + app.state.state_handler.set_workflow(wf) + return app.state.state_handler.get_workflow(workflow_id) +@app.get("/workflows/resubmit/{workflow_id}") +def resubmit_workflow(workflow_id: str) -> Workflow: + """Get information about the resource manager.""" + with app.state.state_handler.wc_state_lock(): + wf = app.state.state_handler.get_workflow(workflow_id) + wf.workflow_id = new_ulid_str() + wf.step_index = 0 + wf.start_time = None + wf.end_time = None + wf.submitted_time = datetime.now() + for step in wf.steps: + step.step_id = new_ulid_str() + step.start_time = None + step.end_time = None + step.status = ActionStatus.NOT_STARTED + copy_workflow_files(old_id=workflow_id, workflow=wf, working_directory=workcell_manager_definition.plugin_config.workcell_directory) + app.state.state_handler.set_workflow(wf) + return app.state.state_handler.get_workflow(workflow_id) + +@app.post("/workflows/retry") +def retry_workflow(workflow_id: str, + index: int = -1) -> Workflow: + """Get information about the 
resource manager.""" + with app.state.state_handler.wc_state_lock(): + wf = app.state.state_handler.get_workflow(workflow_id) + if wf.status in ["completed", "failed"]: + if index >= 0: + wf.step_index = index + wf.status = WorkflowStatus.QUEUED + app.state.state_handler.set_workflow(wf) + return app.state.state_handler.get_workflow(workflow_id) + +@app.post("/workflows/start") async def start_workflow( workflow: Annotated[str, Form()], experiment_id: Annotated[Optional[str], Form()] = None, @@ -143,7 +261,18 @@ async def start_workflow( - +@app.post("/nodes/reserve") +def reserve_nodes(ownership_info: OwnershipInfo, + nodes: list[str]) -> Workflow: + """Get information about the resource manager.""" + with app.state.state_handler.wc_state_lock(): + wf = app.state.state_handler.get_workflow(workflow_id) + if wf.status in ["completed", "failed"]: + if index >= 0: + wf.step_index = index + wf.status = WorkflowStatus.QUEUED + app.state.state_handler.set_workflow(wf) + return app.state.state_handler.get_workflow(workflow_id) if __name__ == "__main__": diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py index 5e14fc2..ff4acb9 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py @@ -10,6 +10,7 @@ import copy from pathlib import Path from datetime import datetime +import shutil def validate_node_names(workflow: Workflow, workcell: WorkcellDefinition) -> None: """ @@ -114,7 +115,7 @@ def create_workflow( steps.append(step) wf.steps = steps - wf.scheduler_metadata.submitted_time = datetime.now() + wf.submitted_time = datetime.now() return wf def save_workflow_files(working_directory: str, workflow: Workflow, files: list[UploadFile]) -> Workflow: @@ -143,6 +144,21 @@ def save_workflow_files(working_directory: str, workflow: Workflow, files: list[ 
print(f"{step_file_key}: {file_path} ({step_file_path})") return workflow +def copy_workflow_files(working_directory: str, old_id: str, workflow: Workflow) -> Workflow: + """Saves the files to the workflow run directory, + and updates the step files to point to the new location""" + + new = get_workflow_inputs_directory( + workflow_id=workflow.workflow_id, + working_directory=working_directory + ) + old = get_workflow_inputs_directory( + workflow_id=old_id, + working_directory=working_directory + ) + shutil.copytree(old, new) + return workflow + def get_workflow_inputs_directory(workflow_id: str = None, working_directory: str = None) -> Path: """returns a directory name for the workflows inputs""" return Path(working_directory) / "Workflows" / workflow_id / "Inputs" @@ -150,7 +166,7 @@ def get_workflow_inputs_directory(workflow_id: str = None, working_directory: st def cancel_workflow(wf: Workflow, state_manager: WorkcellRedisHandler) -> None: """Cancels the workflow run""" - wf.scheduler_metadata.status = WorkflowStatus.CANCELLED + wf.status = WorkflowStatus.CANCELLED with state_manager.wc_state_lock(): state_manager.set_workflow(wf) return wf @@ -159,7 +175,7 @@ def cancel_workflow(wf: Workflow, state_manager: WorkcellRedisHandler) -> None: def cancel_active_workflows(state_manager: WorkcellRedisHandler) -> None: """Cancels all currently running workflow runs""" for wf in state_manager.get_all_workflows().values(): - if wf.scheduler_metadata.status in [ + if wf.status in [ WorkflowStatus.RUNNING, WorkflowStatus.QUEUED, WorkflowStatus.IN_PROGRESS, diff --git a/tests/example/workcells/test_workcell.workcell.yaml b/tests/example/workcells/test_workcell.workcell.yaml index e9cefc4..cac6930 100644 --- a/tests/example/workcells/test_workcell.workcell.yaml +++ b/tests/example/workcells/test_workcell.workcell.yaml @@ -1,12 +1,26 @@ -name: TestWorkcell -workcell_id: 01JDN5Z858MNVYG7MJW5D6Z94F -description: asldfkj -config: - scheduler_update_interval: 0.1 - 
node_update_interval: 1.0 - auto_start: true - workcell_directory: "/.MADSci/Workcell" - -nodes: - "liquid_handler": "http://localhost:2000" - +name: TestWorkcell +workcell_id: 01JDN5Z858MNVYG7MJW5D6Z94F +description: asldfkj +config: + workcell_name: Workcell 1 + host: 127.0.0.1 + port: 8013 + workcell_directory: /.MADSci/Workcell + redis_host: localhost + redis_port: 6379 + redis_password: null + scheduler_update_interval: 0.1 + node_update_interval: 1.0 + auto_start: true + clear_workflows: false + cold_start_delay: 0 + scheduler: schedulers.default_scheduler +nodes: + liquid_handler: + node_name: liquid_handler + node_id: 01JFGDTY5DA5H6081EFHXDEEWM + node_url: http://localhost:2000/ + node_description: A Node + module_definition: null + config: [] + commands: {} From 5e8bdd9bbc8c72d06136a5985d0292d9bef4ecb9 Mon Sep 17 00:00:00 2001 From: root Date: Wed, 15 Jan 2025 21:55:52 +0000 Subject: [PATCH 5/5] fixing precommit and state_manger --- .../madsci/client/cli/module_cli.py | 2 +- .../madsci/client/cli/node_cli.py | 2 +- .../madsci/client/node/__init__.py | 1 - .../client/node/abstract_node_client.py | 4 +- .../madsci/client/node/rest_node_client.py | 4 +- .../madsci/client/workcell/__init__.py | 2 - .../madsci/client/workcell/workcell_client.py | 226 +++++++++----- .../madsci_common/madsci/common/exceptions.py | 10 +- .../madsci/common/types/node_types.py | 13 +- .../madsci/common/types/step_types.py | 11 +- .../madsci/common/types/workcell_types.py | 13 +- .../madsci/common/types/workflow_types.py | 17 +- .../madsci/module/abstract_module.py | 4 +- .../madsci/tests/workflow_tests.py | 13 +- .../madsci/workcell_manager/redis_handler.py | 96 +++--- .../schedulers/default_scheduler.py | 58 ++-- .../workcell_manager/schedulers/scheduler.py | 27 +- .../workcell_manager/workcell_engine.py | 133 ++++---- .../workcell_manager_types.py | 3 - .../workcell_manager/workcell_server.py | 143 +++++---- .../madsci/workcell_manager/workcell_utils.py | 43 ++- 
.../madsci/workcell_manager/workflow_utils.py | 99 +++--- ruff.toml | 1 + .../workflows/test_workflow.workflow.yaml | 1 - tests/test_cli.ipynb | 288 +++++++++--------- tests/test_module.ipynb | 158 +++++----- 26 files changed, 749 insertions(+), 623 deletions(-) diff --git a/madsci/madsci_client/madsci/client/cli/module_cli.py b/madsci/madsci_client/madsci/client/cli/module_cli.py index 6149bd9..ebba03e 100644 --- a/madsci/madsci_client/madsci/client/cli/module_cli.py +++ b/madsci/madsci_client/madsci/client/cli/module_cli.py @@ -81,7 +81,7 @@ def module(ctx: Context, name: Optional[str], path: Optional[str]) -> None: help="The template of the module configuration to use.", ) @click.pass_context -def create( # noqa: PLR0913 +def create( ctx: Context, name: Optional[str], path: Optional[str], diff --git a/madsci/madsci_client/madsci/client/cli/node_cli.py b/madsci/madsci_client/madsci/client/cli/node_cli.py index 4fe13a9..a54fb2d 100644 --- a/madsci/madsci_client/madsci/client/cli/node_cli.py +++ b/madsci/madsci_client/madsci/client/cli/node_cli.py @@ -136,7 +136,7 @@ def node(ctx: Context, name: Optional[str], path: Optional[str]) -> None: help="Don't add node to any workcell.", ) @click.pass_context -def create( # noqa: PLR0913 +def create( ctx: Context, name: Optional[str], path: Optional[str], diff --git a/madsci/madsci_client/madsci/client/node/__init__.py b/madsci/madsci_client/madsci/client/node/__init__.py index 4255409..2513a69 100644 --- a/madsci/madsci_client/madsci/client/node/__init__.py +++ b/madsci/madsci_client/madsci/client/node/__init__.py @@ -8,7 +8,6 @@ } - __all__ = [ "NODE_CLIENT_MAP", "AbstractNodeClient", diff --git a/madsci/madsci_client/madsci/client/node/abstract_node_client.py b/madsci/madsci_client/madsci/client/node/abstract_node_client.py index a1bf685..9a56312 100644 --- a/madsci/madsci_client/madsci/client/node/abstract_node_client.py +++ b/madsci/madsci_client/madsci/client/node/abstract_node_client.py @@ -1,6 +1,7 @@ """Base node 
client implementation.""" from typing import Any, ClassVar + from pydantic import AnyUrl from madsci.common.types.action_types import ( @@ -14,13 +15,13 @@ NodeClientCapabilities, ) from madsci.common.types.node_types import ( - Node, NodeInfo, NodeSetConfigResponse, NodeStatus, ) from madsci.common.types.resource_types import ResourceDefinition + class AbstractNodeClient: """Base Node Client, protocol agnostic, all node clients should inherit from or be based on this.""" @@ -77,6 +78,7 @@ def get_resources(self) -> dict[str, ResourceDefinition]: def get_log(self) -> list[Event]: """Get the log of the node.""" raise NotImplementedError("get_log is not implemented by this client") + @classmethod def validate_url(cls, url: AnyUrl) -> bool: """check if a url matches this node type""" diff --git a/madsci/madsci_client/madsci/client/node/rest_node_client.py b/madsci/madsci_client/madsci/client/node/rest_node_client.py index c854c14..5960118 100644 --- a/madsci/madsci_client/madsci/client/node/rest_node_client.py +++ b/madsci/madsci_client/madsci/client/node/rest_node_client.py @@ -3,9 +3,9 @@ import json from pathlib import Path from typing import Any, ClassVar -from pydantic import AnyUrl import requests +from pydantic import AnyUrl from madsci.client.node.abstract_node_client import ( AbstractNodeClient, @@ -18,7 +18,6 @@ NodeClientCapabilities, ) from madsci.common.types.node_types import ( - Node, NodeInfo, NodeSetConfigResponse, NodeStatus, @@ -57,7 +56,6 @@ def send_action(self, action_request: ActionRequest) -> ActionResult: ("files", (file, Path(path).open("rb"))) # noqa: SIM115 for file, path in action_request.files.items() ] - rest_response = requests.post( f"{self.url}/action", diff --git a/madsci/madsci_client/madsci/client/workcell/__init__.py b/madsci/madsci_client/madsci/client/workcell/__init__.py index ce335cc..059a3ff 100644 --- a/madsci/madsci_client/madsci/client/workcell/__init__.py +++ b/madsci/madsci_client/madsci/client/workcell/__init__.py @@ 
-2,8 +2,6 @@ from madsci.client.workcell.workcell_client import WorkcellClient - - __all__ = [ "WorkcellClient", ] diff --git a/madsci/madsci_client/madsci/client/workcell/workcell_client.py b/madsci/madsci_client/madsci/client/workcell/workcell_client.py index 64316e6..191598b 100644 --- a/madsci/madsci_client/madsci/client/workcell/workcell_client.py +++ b/madsci/madsci_client/madsci/client/workcell/workcell_client.py @@ -1,24 +1,38 @@ -from madsci.common.types.workflow_types import Workflow, WorkflowDefinition, WorkflowStatus -from madsci.common.exceptions import WorkflowFailedException, WorkflowCanceledException -from typing import Any, Optional +"""client for performing workcell actions""" -from pathlib import Path -import re import copy -import requests import json +import re import time -from datetime import datetime +from pathlib import Path +from typing import Any, Optional + +import requests + +from madsci.common.exceptions import WorkflowFailedException +from madsci.common.types.auth_types import OwnershipInfo +from madsci.common.types.workflow_types import ( + Workflow, + WorkflowDefinition, + WorkflowStatus, +) + class WorkcellClient: """a client for running workflows""" - def __init__(self, workcell_manager_url: str, working_directory: str="~/.MADsci/temp", ownership_info: Optional[OwnershipInfo] = None) -> "WorkflowClient": + + def __init__( + self, + workcell_manager_url: str, + working_directory: str = "~/.MADsci/temp", + ownership_info: Optional[OwnershipInfo] = None, + ) -> "WorkcellClient": """initialize the client""" self.url = workcell_manager_url self.working_directory = Path(working_directory) self.ownership_info = ownership_info - - def query_workflow(self, workflow_id: str) -> Workflow: + + def query_workflow(self, workflow_id: str) -> Optional[Workflow]: """Checks on a workflow run using the id given Parameters @@ -34,20 +48,22 @@ def query_workflow(self, workflow_id: str) -> Workflow: The JSON portion of the response from the server""" 
url = f"{self.url}/workflows/{workflow_id}" - response = requests.get(url) + response = requests.get(url, timeout=10) if response.ok: return Workflow(**response.json()) - else: - response.raise_for_status() + response.raise_for_status() + return None - def start_workflow(self, - workflow: str, - parameters: dict, - validate_only: bool = False, - blocking: bool = True, - raise_on_failed: bool = True, - raise_on_cancelled: bool = True) -> Workflow: + def start_workflow( + self, + workflow: str, + parameters: dict, + validate_only: bool = False, + blocking: bool = True, + raise_on_failed: bool = True, + raise_on_cancelled: bool = True, + ) -> Workflow: """send a workflow to the workcell manager""" workflow = WorkflowDefinition.from_yaml(workflow) WorkflowDefinition.model_validate(workflow) @@ -59,18 +75,22 @@ def start_workflow(self, data={ "workflow": workflow.model_dump_json(), "parameters": json.dumps(parameters), - "validate_only": validate_only - }, + "validate_only": validate_only, + }, files={ ("files", (str(Path(path).name), Path.open(Path(path), "rb"))) for _, path in files.items() }, - ) + timeout=10, + ) if not blocking: return Workflow(**response.json()) - else: - return self.await_workflow(response.json()["workflow_id"], raise_on_cancelled=raise_on_cancelled, raise_on_failed=raise_on_failed) - + return self.await_workflow( + response.json()["workflow_id"], + raise_on_cancelled=raise_on_cancelled, + raise_on_failed=raise_on_failed, + ) + def _extract_files_from_workflow( self, workflow: WorkflowDefinition ) -> dict[str, Any]: @@ -89,16 +109,27 @@ def _extract_files_from_workflow( ) step.files[file] = Path(files[unique_filename]).name return files - def run_workflows_in_order(workflows: list[str], parameters: list[dict[str: Any]]): + + def run_workflows_in_order( + self, workflows: list[str], parameters: list[dict[str:Any]] + ) -> list[Workflow]: + """run a list of workflows in order""" + wfs = [] for i in range(len(workflows)): - 
self.start_workflow(workflows[i], parameters[i], blocking=True) - def run_workflow_batch(workflows: list[str], parameters: list[dict[str: Any]]): + wf = self.start_workflow(workflows[i], parameters[i], blocking=True) + wfs.append(wf) + return wfs + + def run_workflow_batch( + self, workflows: list[str], parameters: list[dict[str:Any]] + ) -> list[Workflow]: + """run a batch of workflows in no particular order""" id_list = [] for i in range(len(workflows)): response = self.start_workflow(workflows[i], parameters[i], blocking=False) id_list.append(response.json()["workflow_id"]) finished = False - while finished == False: + while not finished: flag = True wfs = [] for id in id_list: @@ -107,29 +138,46 @@ def run_workflow_batch(workflows: list[str], parameters: list[dict[str: Any]]): wfs.append(wf) finished = flag return wfs - def retry_workflow(self, workflow_id: str, index: int = -1): - url = f"{self.url}/workflows/retry" - response = requests.post(url, - params={ - "workflow_id": node_name, - "index": index, - }) - return response.json() - def resubmit_workflow(self, workflow_id: str, - blocking: bool = True, - raise_on_failed: bool = True, - raise_on_cancelled: bool = True): - url = f"{self.url}/workflows/resubmit/{workflow_id}" - response = requests.get(url) - new_wf = Workflow(**response.json()) - if blocking: - return self.await_workflow(new_wf.workflow_id, raise_on_failed=raise_on_failed, raise_on_cancelled=raise_on_cancelled) - else: - return new_wf - - def await_workflow(self, workflow_id: str, - raise_on_failed: bool = True, - raise_on_cancelled: bool = True): + + def retry_workflow(self, workflow_id: str, index: int = -1) -> dict: + """rerun an exisiting wf using the same wf id""" + url = f"{self.url}/workflows/retry" + response = requests.post( + url, + params={ + "workflow_id": workflow_id, + "index": index, + }, + timeout=10, + ) + return response.json() + + def resubmit_workflow( + self, + workflow_id: str, + blocking: bool = True, + raise_on_failed: 
bool = True, + raise_on_cancelled: bool = True, + ) -> Workflow: + """resubmit an existing workflows as a new workflow with a new id""" + url = f"{self.url}/workflows/resubmit/{workflow_id}" + response = requests.get(url, timeout=10) + new_wf = Workflow(**response.json()) + if blocking: + return self.await_workflow( + new_wf.workflow_id, + raise_on_failed=raise_on_failed, + raise_on_cancelled=raise_on_cancelled, + ) + return new_wf + + def await_workflow( + self, + workflow_id: str, + raise_on_failed: bool = True, + raise_on_cancelled: bool = True, + ) -> Workflow: + """await a workflows completion""" prior_status = None prior_index = None while True: @@ -167,38 +215,50 @@ def await_workflow(self, workflow_id: str, f"Workflow {wf.name} ({wf.workflow_id}) was cancelled on step {wf.step_index}: '{wf.steps[wf.step_index].name}'." ) return wf - def get_all_nodes(self): + + def get_all_nodes(self) -> dict: + """get all nodes in the workcell""" url = f"{self.url}/nodes" - response = requests.get(url) + response = requests.get(url, timeout=10) return response.json() - def get_node(self, node_name): + + def get_node(self, node_name: str) -> dict: + """get a single node from a workcell""" url = f"{self.url}/nodes/{node_name}" - response = requests.get(url) + response = requests.get(url, timeout=10) return response.json() - def add_node(self, node_name: str, node_url: str, node_description: str="A Node", permanent: bool=False): - url = f"{self.url}/nodes/add_node" - response = requests.post(url, - params={ - "node_name": node_name, - "node_url": node_url, - "node_description": node_description, - "permanent": permanent - }) - return response.json() - def reserve_node(self, node_name: str, duration: datetime): - url = f"{self.url}/nodes/reserve" - response = requests.post(url, - params={ - "node_name": node_name, - "duration": str(datetime) - "ownership_info": self.ownership_info - }) - return response.json() - def get_all_workflows(self): + + def add_node( + self, + 
node_name: str, + node_url: str, + node_description: str = "A Node", + permanent: bool = False, + ) -> dict: + """add a node to a workcell""" + url = f"{self.url}/nodes/add_node" + response = requests.post( + url, + params={ + "node_name": node_name, + "node_url": node_url, + "node_description": node_description, + "permanent": permanent, + }, + timeout=10, + ) + return response.json() + + def get_all_workflows(self) -> dict: + """get all workflows from a workcell manager""" url = f"{self.url}/workflows" - response = requests.get(url) + response = requests.get(url, timeout=100) return response.json() -def insert_parameter_values(workflow: WorkflowDefinition, parameters: dict[str, Any]) -> Workflow: + + +def insert_parameter_values( + workflow: WorkflowDefinition, parameters: dict[str, Any] +) -> Workflow: """Replace the parameter strings in the workflow with the provided values""" for param in workflow.parameters: if param.name not in parameters: @@ -220,7 +280,10 @@ def insert_parameter_values(workflow: WorkflowDefinition, parameters: dict[str, steps.append(step) workflow.flowdef = steps -def walk_and_replace(args: dict[str, Any], input_parameters: dict[str, Any]) -> dict[str, Any]: + +def walk_and_replace( + args: dict[str, Any], input_parameters: dict[str, Any] +) -> dict[str, Any]: """Recursively walk the arguments and replace all parameters""" new_args = copy.deepcopy(args) for key, val in args.items(): @@ -283,6 +346,3 @@ def value_substitution(input_string: str, input_parameters: dict[str, Any]) -> s + ", please define it in the parameters section of the Workflow Definition." 
) return input_string - - - diff --git a/madsci/madsci_common/madsci/common/exceptions.py b/madsci/madsci_common/madsci/common/exceptions.py index 43c9872..bbfc9db 100644 --- a/madsci/madsci_common/madsci/common/exceptions.py +++ b/madsci/madsci_common/madsci/common/exceptions.py @@ -12,18 +12,20 @@ class ActionMissingFileError(ValueError): class ActionNotImplementedError(ValueError): """An action was requested, but isn't implemented by the node""" -class WorkflowFailedException(Exception): + +class WorkflowFailedError(Exception): """Raised when a workflow fails""" - def __init__(self, message: str): + def __init__(self, message: str) -> "WorkflowFailedError": """Initializes the exception""" super().__init__(message) self.message = message -class WorkflowCanceledException(Exception): + +class WorkflowCanceledError(Exception): """Raised when a workflow is canceled""" - def __init__(self, message: str): + def __init__(self, message: str) -> "WorkflowCanceledError": """Initializes the exception""" super().__init__(message) self.message = message diff --git a/madsci/madsci_common/madsci/common/types/node_types.py b/madsci/madsci_common/madsci/common/types/node_types.py index be7156b..e06dbe0 100644 --- a/madsci/madsci_common/madsci/common/types/node_types.py +++ b/madsci/madsci_common/madsci/common/types/node_types.py @@ -1,9 +1,9 @@ """MADSci Node Types.""" +from datetime import datetime from os import PathLike from pathlib import Path from typing import Any, Optional, Union -from datetime import datetime from pydantic import Field from pydantic.fields import computed_field @@ -137,12 +137,12 @@ class Node(BaseModel, arbitrary_types_allowed=True): state: Optional[dict[str, Any]] = Field( default=None, title="Node State", - description="Detailed nodes specific state information" + description="Detailed nodes specific state information", ) reserved_by: Optional["Reservation"] = Field( default=None, title="Reserved By", - description="Ownership unit that is reserving 
this node" + description="Ownership unit that is reserving this node", ) @@ -275,11 +275,14 @@ def description(self) -> str: return "; ".join(reasons) return "Node is ready" + class Reservation(BaseModel): + """a reservation of a module""" + owned_by: OwnershipInfo - + started: datetime - + class NodeSetConfigResponse(BaseModel): """Response from a Node Set Config Request""" diff --git a/madsci/madsci_common/madsci/common/types/step_types.py b/madsci/madsci_common/madsci/common/types/step_types.py index 55af879..36ba72b 100644 --- a/madsci/madsci_common/madsci/common/types/step_types.py +++ b/madsci/madsci_common/madsci/common/types/step_types.py @@ -11,19 +11,21 @@ class Condition(BaseModel): """A model for the conditions a step needs to be run""" + resource: str = Field( title="Condition Target Resource", description="The resource targeted by the condition", ) field: str = Field( title="Condition Target Field", - description="The field in the target resource targeted by the condition", + description="The field in the target resource targeted by the condition", ) value: Any = Field( title="Condition Target Resource", description="The resource targeted by the condition", ) + class StepDefinition(BaseModel): """A definition of a step in a workflow.""" @@ -40,10 +42,7 @@ class StepDefinition(BaseModel): title="Step Action", description="The action to perform in the step.", ) - node: str = Field( - title="Node Name", - description="Name of the node to run on" - ) + node: str = Field(title="Node Name", description="Name of the node to run on") args: dict[str, Any] = Field( title="Step Arguments", description="Arguments for the step action.", @@ -57,7 +56,7 @@ class StepDefinition(BaseModel): conditions: list[Condition] = Field( title="Step Conditions", description="Conditions for running the step", - default_factory=list + default_factory=list, ) data_labels: dict[str, str] = Field( title="Step Data Labels", diff --git 
a/madsci/madsci_common/madsci/common/types/workcell_types.py b/madsci/madsci_common/madsci/common/types/workcell_types.py index d9d4aea..9cdcba1 100644 --- a/madsci/madsci_common/madsci/common/types/workcell_types.py +++ b/madsci/madsci_common/madsci/common/types/workcell_types.py @@ -44,6 +44,7 @@ class WorkcellDefinition(BaseModel, extra="allow"): class WorkcellConfig(BaseModel): """Configuration for a MADSci Workcell.""" + workcell_name: str = Field( default="Workcell 1", title="Name", @@ -62,7 +63,7 @@ class WorkcellConfig(BaseModel): workcell_directory: str = Field( default="/.MADsci/Workcell", title="Workcell Directory", - description="Directory to save workflow files" + description="Directory to save workflow files", ) redis_host: str = Field( default="localhost", @@ -89,6 +90,11 @@ class WorkcellConfig(BaseModel): title="Node Update Interval", description="The interval at which the workcell queries its node's states, in seconds.Must be <= scheduler_update_interval", ) + heartbeat_interval: float = Field( + default=2.0, + title="Heartbeat Interval", + description="The interval at which the workcell queries its node's states, in seconds.Must be <= scheduler_update_interval", + ) auto_start: bool = Field( default=True, title="Auto Start", @@ -107,6 +113,5 @@ class WorkcellConfig(BaseModel): scheduler: str = Field( default="schedulers.default_scheduler", title="scheduler", - description="Scheduler module in the workcell manager scheduler folder with a Scheduler class that inherits from AbstractScheduler to use" - - ) \ No newline at end of file + description="Scheduler module in the workcell manager scheduler folder with a Scheduler class that inherits from AbstractScheduler to use", + ) diff --git a/madsci/madsci_common/madsci/common/types/workflow_types.py b/madsci/madsci_common/madsci/common/types/workflow_types.py index 0628922..2a55570 100644 --- a/madsci/madsci_common/madsci/common/types/workflow_types.py +++ 
b/madsci/madsci_common/madsci/common/types/workflow_types.py @@ -1,14 +1,18 @@ +"""Types for MADSci Worfklow running.""" + from datetime import datetime, timedelta from enum import Enum -from typing import Any, Optional, Union +from typing import Any, ClassVar, Optional, Union from pydantic import Field, field_validator from madsci.common.types.base_types import BaseModel, new_ulid_str from madsci.common.types.step_types import Step + class WorkflowStatus(str, Enum): """Status for a workflow run""" + QUEUED = "queued" """Workflow run is queued, hasn't started yet""" RUNNING = "running" @@ -42,6 +46,7 @@ class WorkflowParameter(BaseModel): default: Optional[Any] = None """ the default value of the parameter""" + class WorkflowMetadata(BaseModel, extra="allow"): """Metadata container""" @@ -52,6 +57,7 @@ class WorkflowMetadata(BaseModel, extra="allow"): version: Union[float, str] = "" """Version of the object""" + class WorkflowDefinition(BaseModel): """Grand container that pulls all info of a workflow together""" @@ -59,12 +65,11 @@ class WorkflowDefinition(BaseModel): """Name of the workflow""" workflow_metadata: WorkflowMetadata = Field(default_factory=WorkflowMetadata) """Information about the flow""" - parameters: Optional[list[WorkflowParameter]] = [] + parameters: ClassVar[Optional[list[WorkflowParameter]]] = [] """Inputs to the workflow""" flowdef: list[Step] """User Submitted Steps of the flow""" - @field_validator("flowdef", mode="after") @classmethod def ensure_data_label_uniqueness(cls, v: Any) -> Any: @@ -79,9 +84,9 @@ def ensure_data_label_uniqueness(cls, v: Any) -> Any: return v - class SchedulerMetadata(BaseModel): """Scheduler information""" + ready_to_run: bool = False """whether or not the next step in the workflow is ready to run""" priority: int = 0 @@ -89,13 +94,14 @@ class SchedulerMetadata(BaseModel): class Workflow(WorkflowDefinition): """Container for a workflow run""" + scheduler_metadata: SchedulerMetadata = 
Field(default_factory=SchedulerMetadata) """scheduler information for the workflow run""" label: Optional[str] = None """Label for the workflow run""" workflow_id: str = Field(default_factory=new_ulid_str) """ID of the workflow run""" - steps: list[Step] = [] + steps: ClassVar[list[Step]] = [] """WEI Processed Steps of the flow""" parameter_values: dict[str, Any] = Field(default_factory={}) """parameter values used inthis workflow""" @@ -118,7 +124,6 @@ class Workflow(WorkflowDefinition): paused: Optional[bool] = False """whether or not the workflow is paused""" - def get_step_by_name(self, name: str) -> Step: """Return the step object by its name""" for step in self.steps: diff --git a/madsci/madsci_module/madsci/module/abstract_module.py b/madsci/madsci_module/madsci/module/abstract_module.py index d30abe1..2565544 100644 --- a/madsci/madsci_module/madsci/module/abstract_module.py +++ b/madsci/madsci_module/madsci/module/abstract_module.py @@ -472,12 +472,10 @@ def _parse_action_args( else: self.logger.log_info(f"Ignoring unexpected argument {arg_name}") for file in action_request.files: - if file in parameters: arg_dict[file] = action_request.files[file] else: - #self.logger.log_info(f"Ignoring unexpected file {file}") - pass + self.logger.log_info(f"Ignoring unexpected file {file}") return arg_dict def _check_required_args( diff --git a/madsci/madsci_workcell_manager/madsci/tests/workflow_tests.py b/madsci/madsci_workcell_manager/madsci/tests/workflow_tests.py index 50e92c5..f9f4c12 100644 --- a/madsci/madsci_workcell_manager/madsci/tests/workflow_tests.py +++ b/madsci/madsci_workcell_manager/madsci/tests/workflow_tests.py @@ -1,14 +1,17 @@ -from madsci.common.types.workflow_types import WorkflowDefinition -from madsci.madsci_client.madsci.client.workcell.workcell_client import WorkcellClient -import requests +"""basic tests for workflow functionality""" + from pathlib import Path +from madsci.madsci_client.madsci.client.workcell.workcell_client import 
WorkcellClient client = WorkcellClient("http://localhost:8013") print(client.get_node("liquid_handler")) print(client.add_node("liquid_handler", "http://localhost:2000", permanent=True)) -wf = client.start_workflow(Path("../../../../tests/example/workflows/test_workflow.workflow.yaml").resolve(), {}) +wf = client.start_workflow( + Path("../../../../tests/example/workflows/test_workflow.workflow.yaml").resolve(), + {}, +) print(wf.workflow_id) client.resubmit_workflow(wf.workflow_id) -print(client.get_all_workflows()) \ No newline at end of file +print(client.get_all_workflows()) diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py index db3a172..ad86c97 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/redis_handler.py @@ -3,17 +3,17 @@ """ import warnings -from typing import Any, Callable, Dict, Union +from typing import Any, Callable, Union import redis -from pottery import InefficientAccessWarning, RedisDict, Redlock, RedisList +from pottery import InefficientAccessWarning, RedisDict, RedisList, Redlock from pydantic import ValidationError +from madsci.common.types.base_types import new_ulid_str +from madsci.common.types.node_types import Node, NodeDefinition from madsci.common.types.workcell_types import WorkcellDefinition from madsci.common.types.workflow_types import Workflow -from madsci.common.types.base_types import new_ulid_str from madsci.workcell_manager.workcell_manager_types import WorkcellManagerDefinition -from madsci.common.types.node_types import Node, NodeDefinition class WorkcellRedisHandler: @@ -60,23 +60,23 @@ def _workcell(self) -> RedisDict: return RedisDict( key=f"{self._workcell_prefix}:workcell", redis=self._redis_client ) + @property def _nodes(self) -> RedisDict: - return RedisDict( - key=f"{self._workcell_prefix}:nodes", 
redis=self._redis_client - ) + return RedisDict(key=f"{self._workcell_prefix}:nodes", redis=self._redis_client) + @property def _workflow_queue(self) -> RedisList: return RedisList( key=f"{self._workcell_prefix}:workflow_queue", redis=self._redis_client ) + @property def _workflows(self) -> RedisDict: return RedisDict( key=f"{self._workcell_prefix}:workflows", redis=self._redis_client ) - def wc_state_lock(self) -> Redlock: """ Gets a lock on the workcell's state. This should be called before any state updates are made, @@ -89,7 +89,7 @@ def wc_state_lock(self) -> Redlock: ) # *State Methods - def get_state(self) -> Dict[str, Dict[Any, Any]]: + def get_state(self) -> dict[str, dict[Any, Any]]: """ Return a dict containing the current state of the workcell. """ @@ -104,7 +104,6 @@ def get_state(self) -> Dict[str, Dict[Any, Any]]: "shutdown": self.shutdown, } - @property def error(self) -> str: """Latest error on the server""" @@ -117,9 +116,7 @@ def error(self, value: str) -> None: self.mark_state_changed() return self._redis_client.set(f"{self._workcell_prefix}:error", value) - def clear_state( - self, clear_workflows: bool = False - ) -> None: + def clear_state(self, clear_workflows: bool = False) -> None: """ Clears the state of the workcell, optionally leaving the locations state intact. 
""" @@ -191,9 +188,7 @@ def get_all_workflows(self) -> dict[str, Workflow]: valid_workflows = {} for workflow_id, workflow in self._workflows.to_dict().items(): try: - valid_workflows[str(workflow_id)] = Workflow.model_validate( - workflow - ) + valid_workflows[str(workflow_id)] = Workflow.model_validate(workflow) except ValidationError: continue return valid_workflows @@ -208,7 +203,7 @@ def set_workflow(self, wf: Workflow) -> None: wf_dump = Workflow.model_validate(wf).model_dump(mode="json") self._workflows[str(wf_dump["workflow_id"])] = wf_dump self.mark_state_changed() - + def set_workflow_quiet(self, wf: Workflow) -> None: """ Sets a workflow by ID @@ -235,39 +230,40 @@ def update_workflow( self.set_workflow(func(self.get_workflow(workflow_id), *args, **kwargs)) def get_node(self, node_name: str) -> Node: - """ - Returns a node by name - """ - return Node.model_validate(self._nodes[node_name]) - - def get_all_nodes(self) -> Dict[str, Node]: - """ - Returns all nodes - """ - valid_nodes = {} - for node_name, node in self._nodes.to_dict().items(): - try: - valid_nodes[str(node_name)] = Node.model_validate(node) - except ValidationError: - continue - return valid_nodes + """ + Returns a node by name + """ + return Node.model_validate(self._nodes[node_name]) + + def get_all_nodes(self) -> dict[str, Node]: + """ + Returns all nodes + """ + valid_nodes = {} + for node_name, node in self._nodes.to_dict().items(): + try: + valid_nodes[str(node_name)] = Node.model_validate(node) + except ValidationError: + continue + return valid_nodes def set_node( - self, node_name: str, node: Union[Node, NodeDefinition, Dict[str, Any]] - ) -> None: - """ - Sets a node by name - """ - if isinstance(node, Node): - node_dump = node.model_dump(mode="json") - elif isinstance(node, NodeDefinition): - node_dump = Node.model_validate( - node, from_attributes=True - ).model_dump(mode="json") - else: - node_dump = Node.model_validate(node).model_dump(mode="json") - self._nodes[node_name] = 
node_dump - self.mark_state_changed() + self, node_name: str, node: Union[Node, NodeDefinition, dict[str, Any]] + ) -> None: + """ + Sets a node by name + """ + if isinstance(node, Node): + node_dump = node.model_dump(mode="json") + elif isinstance(node, NodeDefinition): + node_dump = Node.model_validate(node, from_attributes=True).model_dump( + mode="json" + ) + else: + node_dump = Node.model_validate(node).model_dump(mode="json") + self._nodes[node_name] = node_dump + self.mark_state_changed() + def delete_node(self, node_name: str) -> None: """ Deletes a node by name @@ -281,6 +277,4 @@ def update_node( """ Updates the state of a node. """ - self.set_node( - node_name, func(self.get_node(node_name), *args, **kwargs) - ) + self.set_node(node_name, func(self.get_node(node_name), *args, **kwargs)) diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler.py index 270271d..84c66bf 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/default_scheduler.py @@ -1,42 +1,40 @@ +"""the default scheduler for the system""" + from schedulers.scheduler import AbstractScheduler -from madsci.common.types.step_types import Step -from datetime import datetime -from typing import Optional +from madsci.common.types.step_types import Step class Scheduler(AbstractScheduler): + """the main class that handles checking whether steps are ready to run and assigning priority""" - def run_iteration(self): + def run_iteration(self) -> None: + """run an iteration of the scheduler and set priority for which workflow to run next""" priority = 0 - workflows = sorted(self.state_handler.get_all_workflows().values(), key=lambda item: item.submitted_time) + workflows = sorted( + self.state_handler.get_all_workflows().values(), + key=lambda item: 
item.submitted_time, + ) for wf in workflows: - step = wf.steps[wf.step_index] - wf.scheduler_metadata.ready_to_run = not(wf.paused) and wf.status in ["queued", "in_progress"] and self.check_step(step) - wf.scheduler_metadata.priority = priority - priority -= 1 - self.state_handler.set_workflow_quiet(wf) - def check_step(self, step: Step): + step = wf.steps[wf.step_index] + wf.scheduler_metadata.ready_to_run = ( + not (wf.paused) + and wf.status in ["queued", "in_progress"] + and self.check_step(step) + ) + wf.scheduler_metadata.priority = priority + priority -= 1 + self.state_handler.set_workflow_quiet(wf) + + def check_step(self, step: Step) -> bool: + """check if a step is ready to run""" return self.resource_checks(step) and self.node_checks(step) - - def resource_checks(self, step: Step): + + def resource_checks(self, step: Step) -> bool: # noqa: ARG002 + """check if the resources for the step are ready TODO: actually check""" return True - def node_checks(self, step: Step): + def node_checks(self, step: Step) -> bool: + """check if the step node is ready to run a step""" node = self.state_handler.get_node(step.node) - if node is not None and node.status.ready: - return True - return False - - - - - - - - - - - - - + return node is not None and node.status.ready diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/scheduler.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/scheduler.py index c5ebd8b..d87f9bc 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/scheduler.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/schedulers/scheduler.py @@ -1,17 +1,26 @@ +"""the abstract class for schedulers""" + +from madsci.common.types.event_types import Event from madsci.workcell_manager.redis_handler import WorkcellRedisHandler from madsci.workcell_manager.workcell_manager_types import WorkcellManagerDefinition -from madsci.common.types.event_types import Event -def 
send_event(test: Event): - pass +def send_event(test: Event) -> None: + """send an event to the server""" -class AbstractScheduler: - def __init__(self, workcell_manager_definition: WorkcellManagerDefinition, state_handler: WorkcellRedisHandler): + +class AbstractScheduler: + """abstract definition of a scheduler""" + + def __init__( + self, + workcell_manager_definition: WorkcellManagerDefinition, + state_handler: WorkcellRedisHandler, + ) -> "AbstractScheduler": + """sets the state handler and workcell definition""" self.state_handler = state_handler self.workcell_manager_definition = workcell_manager_definition self.running = True - def run_iteration(self): - pass - - + + def run_iteration(self) -> None: + """run an iteration of the scheduler""" diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_engine.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_engine.py index 437cabf..6eb7ae2 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_engine.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_engine.py @@ -2,73 +2,84 @@ Engine Class and associated helpers and data """ -import time import importlib +import time import traceback +from datetime import datetime +from typing import Optional -import requests -import importlib -from madsci.common.types.node_types import Node -from workcell_utils import initialize_workcell, update_active_nodes -from workcell_manager_types import WorkcellManagerDefinition from redis_handler import WorkcellRedisHandler +from workcell_manager_types import WorkcellManagerDefinition +from workcell_utils import find_node_client, initialize_workcell, update_active_nodes from workflow_utils import cancel_active_workflows -from madsci.common.utils import threaded_daemon + from madsci.client.node.abstract_node_client import AbstractNodeClient -from workcell_utils import find_node_client -from redis_handler import WorkcellRedisHandler -from 
madsci.common.types.action_types import ActionRequest, ActionStatus, ActionResult -from madsci.common.types.workflow_types import WorkflowStatus +from madsci.common.types.action_types import ActionRequest, ActionResult +from madsci.common.types.node_types import Node from madsci.common.types.step_types import Step -from datetime import datetime +from madsci.common.types.workflow_types import WorkflowStatus +from madsci.common.utils import threaded_daemon + -from typing import Optional class Engine: """ Handles scheduling workflows and executing steps on the workcell. Pops incoming workflows off a redis-based queue and executes them. """ - def __init__(self, workcell_manager_definition: WorkcellManagerDefinition, state_manager: WorkcellRedisHandler) -> None: + def __init__( + self, + workcell_manager_definition: WorkcellManagerDefinition, + state_handler: WorkcellRedisHandler, + ) -> None: """Initialize the scheduler.""" - state_manager.clear_state( + state_handler.clear_state( clear_workflows=workcell_manager_definition.plugin_config.clear_workflows ) self.definition = workcell_manager_definition - self.state_manager = state_manager - cancel_active_workflows(state_manager) - scheduler_module = importlib.import_module(self.definition.plugin_config.scheduler) - self.scheduler = scheduler_module.Scheduler(self.definition, self.state_manager) - with state_manager.wc_state_lock(): - initialize_workcell(state_manager) + self.state_handler = state_handler + cancel_active_workflows(state_handler) + scheduler_module = importlib.import_module( + self.definition.plugin_config.scheduler + ) + self.scheduler = scheduler_module.Scheduler(self.definition, self.state_handler) + with state_handler.wc_state_lock(): + initialize_workcell(state_handler) time.sleep(workcell_manager_definition.plugin_config.cold_start_delay) print("Engine initialized, waiting for workflows...") - #send_event(WorkcellStartEvent(workcell=state_manager.get_workcell())) + # TODO send event def spin(self) -> 
None: """ Continuously loop, updating module states every Config.update_interval seconds. If the state of the workcell has changed, update the active modules and run the scheduler. """ - update_active_nodes(self.state_manager) + update_active_nodes(self.state_handler) node_tick = time.time() scheduler_tick = time.time() heartbeat = time.time() - while True and not self.state_manager.shutdown: + while True and not self.state_handler.shutdown: try: - if time.time() - heartbeat > 2: + if ( + time.time() - heartbeat + > self.definition.plugin_config.heartbeat_interval + ): heartbeat = time.time() print(f"Heartbeat: {time.time()}") if ( - time.time() - node_tick > self.definition.plugin_config.node_update_interval - or self.state_manager.has_state_changed() + time.time() - node_tick + > self.definition.plugin_config.node_update_interval + or self.state_handler.has_state_changed() ): - if not self.state_manager.paused: - update_active_nodes(self.state_manager) + if not self.state_handler.paused: + update_active_nodes(self.state_handler) node_tick = time.time() - if time.time() - scheduler_tick > self.definition.plugin_config.scheduler_update_interval: - with self.state_manager.wc_state_lock(): + if ( + time.time() - scheduler_tick + > self.definition.plugin_config.scheduler_update_interval + ): + with self.state_handler.wc_state_lock(): self.scheduler.run_iteration() self.run_next_step() scheduler_tick = time.time() @@ -83,21 +94,36 @@ def spin(self) -> None: def start_engine_thread(self) -> None: """Spins the engine in its own thread""" self.spin() - - def run_next_step(self): - workflows = self.state_manager.get_all_workflows() - ready_workflows = filter(lambda wf: wf.scheduler_metadata.ready_to_run, workflows.values()) - sorted_ready_workflows = sorted(ready_workflows, key=lambda wf: wf.scheduler_metadata.priority) + + def run_next_step(self) -> None: + """runs the next step in the workflow with the highest priority""" + workflows = 
self.state_handler.get_all_workflows() + ready_workflows = filter( + lambda wf: wf.scheduler_metadata.ready_to_run, workflows.values() + ) + sorted_ready_workflows = sorted( + ready_workflows, key=lambda wf: wf.scheduler_metadata.priority + ) if len(sorted_ready_workflows) > 0: next_wf = sorted_ready_workflows[0] next_wf.status = WorkflowStatus.RUNNING - self.state_manager.set_workflow(next_wf) + self.state_handler.set_workflow(next_wf) self.run_step(next_wf.workflow_id, next_wf.steps[next_wf.step_index]) - def retry_action(self, node: Node, client: AbstractNodeClient, request: ActionRequest, response: Optional[ActionResult] = None): + def retry_action( + self, + node: Node, + client: AbstractNodeClient, + request: ActionRequest, + response: Optional[ActionResult] = None, + ) -> ActionResult: + """retry an action if it fails""" if node.info.capabilities.get_action_result: - - while response is None or response.status not in ["not_ready", "succeeded", "failed"]: + while response is None or response.status not in [ + "not_ready", + "succeeded", + "failed", + ]: try: response = client.get_action_result(request.action_id) time.sleep(5) @@ -105,26 +131,30 @@ def retry_action(self, node: Node, client: AbstractNodeClient, request: ActionRe time.sleep(5) return response return response + @threaded_daemon - def run_step(self, workflow_id: str, step: Step): - with self.state_manager.wc_state_lock(): - wf = self.state_manager.get_workflow(workflow_id) + def run_step(self, workflow_id: str, step: Step) -> None: + """run a step in a seperate thread""" + with self.state_handler.wc_state_lock(): + wf = self.state_handler.get_workflow(workflow_id) wf.steps[wf.step_index].start_time = datetime.now() if wf.step_index == 0: wf.start_time = datetime.now() - self.state_manager.set_workflow(wf) - node = self.state_manager.get_node(step.node) + self.state_handler.set_workflow(wf) + node = self.state_handler.get_node(step.node) client = find_node_client(node.node_url) try: - request = 
ActionRequest(action_name=step.action, args=step.args, files=step.files) + request = ActionRequest( + action_name=step.action, args=step.args, files=step.files + ) response = client.send_action(request) except Exception: response = self.retry_action(node, client, request) response = self.retry_action(node, client, request, response) if response is None: response = request.failed() - with self.state_manager.wc_state_lock(): - wf = self.state_manager.get_workflow(workflow_id) + with self.state_handler.wc_state_lock(): + wf = self.state_handler.get_workflow(workflow_id) if response.status in ["succeeded", "failed"]: wf.steps[wf.step_index].status = response.status wf.steps[wf.step_index].results[response.action_id] = response @@ -141,9 +171,4 @@ def run_step(self, workflow_id: str, step: Step): if response.status == "failed": wf.status = WorkflowStatus.FAILED wf.end_time = datetime.now() - #print(self.state_manager.get_all_workflows()) - #print(wf) - self.state_manager.set_workflow(wf) - #print(self.state_manager.get_all_workflows()) - - \ No newline at end of file + self.state_handler.set_workflow(wf) diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_manager_types.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_manager_types.py index 56fd3ab..8072a7c 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_manager_types.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_manager_types.py @@ -2,7 +2,6 @@ from sqlmodel.main import Field -from madsci.common.types.base_types import BaseModel from madsci.common.types.squid_types import ManagerDefinition from madsci.common.types.workcell_types import WorkcellConfig @@ -20,5 +19,3 @@ class WorkcellManagerDefinition(ManagerDefinition): title="Plugin Configuration", description="The configuration for the workcell manager plugin.", ) - - diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py 
b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py index 7ea69e7..dc87d45 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_server.py @@ -1,28 +1,34 @@ """MADSci Workcell Manager Server.""" +import argparse +import json +import traceback +from datetime import datetime +from typing import Annotated, Optional, Union + from fastapi import FastAPI, Form, HTTPException, UploadFile -from fastapi.datastructures import State from redis_handler import WorkcellRedisHandler -from madsci.common.types.base_types import new_ulid_str -from madsci.common.types.auth_types import OwnershipInfo +from workcell_engine import Engine +from madsci.common.types.action_types import ActionStatus +from madsci.common.types.base_types import new_ulid_str +from madsci.common.types.node_types import Node, NodeDefinition +from madsci.common.types.workcell_types import WorkcellDefinition +from madsci.common.types.workflow_types import ( + Workflow, + WorkflowDefinition, + WorkflowStatus, +) from madsci.workcell_manager.workcell_manager_types import ( WorkcellManagerDefinition, ) - -from madsci.workcell_manager.workflow_utils import create_workflow, save_workflow_files, copy_workflow_files from madsci.workcell_manager.workcell_utils import find_node_client +from madsci.workcell_manager.workflow_utils import ( + copy_workflow_files, + create_workflow, + save_workflow_files, +) -from workcell_engine import Engine - -from typing import Annotated, Optional, Union -from madsci.common.types.workcell_types import WorkcellDefinition -from madsci.common.types.workflow_types import WorkflowDefinition, Workflow, WorkflowStatus -from madsci.common.types.node_types import Node, NodeDefinition -import argparse -import json -import traceback -from datetime import datetime arg_parser = argparse.ArgumentParser() arg_parser.add_argument( "--workcell_file", @@ -30,68 +36,69 @@ 
default="./workcells/workcell.yaml", help="location of the workcell file", ) + + async def lifespan(app: FastAPI) -> None: - app.state.state_handler=WorkcellRedisHandler(workcell_manager_definition) + """start the server functionality and initialize the state handler""" + app.state.state_handler = WorkcellRedisHandler(workcell_manager_definition) app.state.state_handler.set_workcell(workcell) engine = Engine(workcell_manager_definition, app.state.state_handler) engine.start_engine_thread() yield + + app = FastAPI(lifespan=lifespan) + @app.get("/info") def info() -> WorkcellManagerDefinition: """Get information about the resource manager.""" return workcell_manager_definition + @app.get("/workcell") def get_workcell() -> WorkcellDefinition: """Get information about the resource manager.""" return app.state.state_handler.get_workcell() + @app.get("/nodes") def get_nodes() -> dict[str, Node]: """Get information about the resource manager.""" return app.state.state_handler.get_all_nodes() + @app.get("/nodes/{node_name}") def get_node(node_name: str) -> Union[Node, str]: """Get information about the resource manager.""" try: node = app.state.state_handler.get_node(node_name) - except Exception as e: + except Exception: return "Node not found!" return node + @app.post("/nodes/add_node") def add_node( node_name: str, node_url: str, node_description: str = "A Node", - permanent: bool = False - ) -> Union[Node, str]: + permanent: bool = False, +) -> Union[Node, str]: """Get information about the resource manager.""" if node_name in app.state.state_handler.get_all_nodes(): return "Node name exists, node names must be unique!" 
node = Node(node_url=node_url) app.state.state_handler.set_node(node_name, node) if permanent: - workcell.nodes[node_name] = NodeDefinition(node_name=node_name, node_url=node_url, node_description=node_description) + workcell.nodes[node_name] = NodeDefinition( + node_name=node_name, node_url=node_url, node_description=node_description + ) workcell.to_yaml(workcell_file) return app.state.state_handler.get_node(node_name) -@app.post("/nodes/reserve") -def reserve_node( - node_name: str, - ownership_info: Optional[OwnershipInfo] = None, - duration: str, - ) -> Union[Node, str]: - """Get information about the resource manager.""" - node = app.state.state_handler.get_node(node_name) - node.reserved_by = ownership_info - app.state.state_handler.set_node(node_name, node) - - return app.state.state_handler.get_node(node_name) +# TODO add node reserve endpoint @app.get("/admin/{command}") @@ -105,6 +112,7 @@ def send_admin_command(command: str) -> list: responses.append(response) return responses + @app.get("/admin/{command}/{node}") def send_admin_command_to_node(command: str, node: str) -> list: """Get information about the resource manager.""" @@ -122,11 +130,13 @@ def get_all_workflows() -> dict[str, Workflow]: """Get information about the resource manager.""" return app.state.state_handler.get_all_workflows() + @app.get("/workflows/{workflow_id}") def get_workflow(workflow_id: str) -> Workflow: """Get information about the resource manager.""" return app.state.state_handler.get_workflow(workflow_id) + @app.get("/workflows/pause/{workflow_id}") def pause_workflow(workflow_id: str) -> Workflow: """Get information about the resource manager.""" @@ -134,13 +144,14 @@ def pause_workflow(workflow_id: str) -> Workflow: wf = app.state.state_handler.get_workflow(workflow_id) if wf.status in ["running", "in_progress", "queued"]: if wf.status == "running": - send_admin_command_to_node("pause", wf.steps[wf.step_index].node) - wf.steps[wf.step_index] = ActionStatus.PAUSED + 
send_admin_command_to_node("pause", wf.steps[wf.step_index].node) + wf.steps[wf.step_index] = ActionStatus.PAUSED wf.paused = True app.state.state_handler.set_workflow(wf) - + return app.state.state_handler.get_workflow(workflow_id) + @app.get("/workflows/resume/{workflow_id}") def resume_workflow(workflow_id: str) -> Workflow: """Get information about the resource manager.""" @@ -148,23 +159,26 @@ def resume_workflow(workflow_id: str) -> Workflow: wf = app.state.state_handler.get_workflow(workflow_id) if wf.paused: if wf.status == "running": - send_admin_command_to_node("resume", wf.steps[wf.step_index].node) - wf.steps[wf.step_index] = ActionStatus.RUNNING + send_admin_command_to_node("resume", wf.steps[wf.step_index].node) + wf.steps[wf.step_index] = ActionStatus.RUNNING wf.paused = False app.state.state_handler.set_workflow(wf) return app.state.state_handler.get_workflow(workflow_id) + @app.get("/workflows/cancel/{workflow_id}") def cancel_workflow(workflow_id: str) -> Workflow: """Get information about the resource manager.""" with app.state.state_handler.wc_state_lock(): wf = app.state.state_handler.get_workflow(workflow_id) if wf.status == "running": - send_admin_command_to_node("stop", wf.steps[wf.step_index].node) - wf.steps[wf.step_index] = ActionStatus.CANCELLED + send_admin_command_to_node("stop", wf.steps[wf.step_index].node) + wf.steps[wf.step_index] = ActionStatus.CANCELLED wf.status = WorkflowStatus.CANCELLED app.state.state_handler.set_workflow(wf) return app.state.state_handler.get_workflow(workflow_id) + + @app.get("/workflows/resubmit/{workflow_id}") def resubmit_workflow(workflow_id: str) -> Workflow: """Get information about the resource manager.""" @@ -180,13 +194,17 @@ def resubmit_workflow(workflow_id: str) -> Workflow: step.start_time = None step.end_time = None step.status = ActionStatus.NOT_STARTED - copy_workflow_files(old_id=workflow_id, workflow=wf, working_directory=workcell_manager_definition.plugin_config.workcell_directory) + 
copy_workflow_files( + old_id=workflow_id, + workflow=wf, + working_directory=workcell_manager_definition.plugin_config.workcell_directory, + ) app.state.state_handler.set_workflow(wf) return app.state.state_handler.get_workflow(workflow_id) + @app.post("/workflows/retry") -def retry_workflow(workflow_id: str, - index: int = -1) -> Workflow: +def retry_workflow(workflow_id: str, index: int = -1) -> Workflow: """Get information about the resource manager.""" with app.state.state_handler.wc_state_lock(): wf = app.state.state_handler.get_workflow(workflow_id) @@ -197,6 +215,7 @@ def retry_workflow(workflow_id: str, app.state.state_handler.set_workflow(wf) return app.state.state_handler.get_workflow(workflow_id) + @app.post("/workflows/start") async def start_workflow( workflow: Annotated[str, Form()], @@ -237,10 +256,11 @@ async def start_workflow( else: parameters = json.loads(parameters) if not isinstance(parameters, dict) or not all( - isinstance(k, str) for k in parameters.keys() + isinstance(k, str) for k in parameters ): raise HTTPException( - status_code=400, detail="Parameters must be a dictionary with string keys" + status_code=400, + detail="Parameters must be a dictionary with string keys", ) workcell = app.state.state_handler.get_workcell() @@ -249,43 +269,34 @@ async def start_workflow( workcell=workcell, experiment_id=experiment_id, parameters=parameters, - state_manager=app.state.state_handler + state_handler=app.state.state_handler, ) if not validate_only: - wf = save_workflow_files(working_directory=workcell_manager_definition.plugin_config.workcell_directory, workflow=wf, files=files) + wf = save_workflow_files( + working_directory=workcell_manager_definition.plugin_config.workcell_directory, + workflow=wf, + files=files, + ) with app.state.state_handler.wc_state_lock(): app.state.state_handler.set_workflow(wf) return wf - - -@app.post("/nodes/reserve") -def reserve_nodes(ownership_info: OwnershipInfo, - nodes: list[str]) -> Workflow: - """Get 
information about the resource manager.""" - with app.state.state_handler.wc_state_lock(): - wf = app.state.state_handler.get_workflow(workflow_id) - if wf.status in ["completed", "failed"]: - if index >= 0: - wf.step_index = index - wf.status = WorkflowStatus.QUEUED - app.state.state_handler.set_workflow(wf) - return app.state.state_handler.get_workflow(workflow_id) +# TODO add reserv nodes endpoint if __name__ == "__main__": import uvicorn - args = arg_parser.parse_args() + + args = arg_parser.parse_args() workcell_file = args.workcell_file workcell = WorkcellDefinition.from_yaml(workcell_file) workcell_manager_definition = WorkcellManagerDefinition( - name="Workcell Manager 1", - description="The First MADSci Workcell Manager.", - plugin_config=workcell.config, - manager_type="workcell_manager" - + name="Workcell Manager 1", + description="The First MADSci Workcell Manager.", + plugin_config=workcell.config, + manager_type="workcell_manager", ) uvicorn.run( app, diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_utils.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_utils.py index 9170f4c..9d7a5fc 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_utils.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workcell_utils.py @@ -1,13 +1,20 @@ -from redis_handler import WorkcellRedisHandler -from pydantic import AnyUrl -from madsci.common.types.workcell_types import WorkcellDefinition -from madsci.common.types.node_types import NodeStatus, Node, NodeDefinition -from madsci.client.node import AbstractNodeClient, NODE_CLIENT_MAP +"""utility functions for the workcell""" + import concurrent -import traceback import warnings +from typing import Optional -def initialize_workcell(state_manager: WorkcellRedisHandler, workcell=None) -> None: +from pydantic import AnyUrl +from redis_handler import WorkcellRedisHandler + +from madsci.client.node import NODE_CLIENT_MAP, AbstractNodeClient +from 
madsci.common.types.node_types import Node, NodeDefinition +from madsci.common.types.workcell_types import WorkcellDefinition + + +def initialize_workcell( + state_manager: WorkcellRedisHandler, workcell: Optional[WorkcellDefinition] = None +) -> None: """ Initializes the state of the workcell from the workcell definition. """ @@ -17,7 +24,11 @@ def initialize_workcell(state_manager: WorkcellRedisHandler, workcell=None) -> N initialize_workcell_nodes(workcell, state_manager) initialize_workcell_resources(workcell) -def initialize_workcell_nodes(workcell: WorkcellDefinition, state_manager: WorkcellRedisHandler): + +def initialize_workcell_nodes( + workcell: WorkcellDefinition, state_manager: WorkcellRedisHandler +) -> None: + """create the nodes for the given workcell""" for key, value in workcell.nodes.items(): if type(value) is NodeDefinition: node = Node(node_url=value.node_url) @@ -26,10 +37,8 @@ def initialize_workcell_nodes(workcell: WorkcellDefinition, state_manager: Workc state_manager.set_node(key, node) -def initialize_workcell_resources(workcell): - pass - - +def initialize_workcell_resources(workcell: WorkcellDefinition) -> None: + """create the resources for a given workcell definition""" def find_node_client(url: str) -> AbstractNodeClient: @@ -42,19 +51,22 @@ def find_node_client(url: str) -> AbstractNodeClient: return client(url) return None + def update_active_nodes(state_manager: WorkcellRedisHandler) -> None: """Update all active nodes in the workcell.""" with concurrent.futures.ThreadPoolExecutor() as executor: node_futures = [] for node_name, node in state_manager.get_all_nodes().items(): - node_future = executor.submit(update_node, node_name, node, state_manager) - node_futures.append(node_future) + node_future = executor.submit(update_node, node_name, node, state_manager) + node_futures.append(node_future) # Wait for all node updates to complete concurrent.futures.wait(node_futures) -def update_node(node_name: str, node: Node, state_manager: 
WorkcellRedisHandler) -> None: +def update_node( + node_name: str, node: Node, state_manager: WorkcellRedisHandler +) -> None: """Update a single node's state and about information.""" try: old_status = node.status @@ -72,4 +84,3 @@ def update_node(node_name: str, node: Node, state_manager: WorkcellRedisHandler) category=UserWarning, stacklevel=1, ) - diff --git a/madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py b/madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py index ff4acb9..fb321e3 100644 --- a/madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py +++ b/madsci/madsci_workcell_manager/madsci/workcell_manager/workflow_utils.py @@ -1,33 +1,39 @@ """Utility function for the workcell manager.""" -from madsci.common.types.workcell_types import WorkcellDefinition -from madsci.common.types.workflow_types import Workflow, WorkflowDefinition, WorkflowStatus -from madsci.common.types.step_types import Step -from madsci.common.types.node_types import Node -from redis_handler import WorkcellRedisHandler -from typing import Optional, Any -from fastapi import UploadFile -import re -import copy -from pathlib import Path -from datetime import datetime + import shutil +from datetime import datetime +from pathlib import Path +from typing import Any, Optional + +from fastapi import UploadFile +from redis_handler import WorkcellRedisHandler + +from madsci.common.types.step_types import Step +from madsci.common.types.workcell_types import WorkcellDefinition +from madsci.common.types.workflow_types import ( + Workflow, + WorkflowDefinition, + WorkflowStatus, +) + def validate_node_names(workflow: Workflow, workcell: WorkcellDefinition) -> None: """ Validates that the nodes in the workflow.flowdef are in the workcell.modules """ for node_name in [step.node for step in workflow.flowdef]: - if not node_name in workcell.nodes: - raise ValueError(f"Node {node_name} not in Workcell {workcell.name}") + if node_name not in 
workcell.nodes: + raise ValueError(f"Node {node_name} not in Workcell {workcell.name}") + + +def replace_locations(workcell: WorkcellDefinition, step: Step) -> None: + """Allow the user to put location names instead of joint angle value""" -def replace_positions(workcell: WorkcellDefinition, step: Step): - """Allow the user to put location names instead of """ - pass -def validate_step(step: Step, state_manager: WorkcellRedisHandler) -> tuple[bool, str]: +def validate_step(step: Step, state_handler: WorkcellRedisHandler) -> tuple[bool, str]: """Check if a step is valid based on the module's about""" - if step.node in state_manager.get_all_nodes(): - node = state_manager.get_node(step.node) + if step.node in state_handler.get_all_nodes(): + node = state_handler.get_node(step.node) info = node.info if info is None: return ( @@ -55,20 +61,18 @@ def validate_step(step: Step, state_manager: WorkcellRedisHandler) -> tuple[bool False, f"Step '{step.name}': Node {step.node} has no action '{step.action}'", ) - else: - return ( - False, - f"Step '{step.name}': Node {step.node} is not defined in workcell", - ) + return ( + False, + f"Step '{step.name}': Node {step.node} is not defined in workcell", + ) def create_workflow( workflow_def: WorkflowDefinition, workcell: WorkcellDefinition, - state_manager: WorkcellRedisHandler, + state_handler: WorkcellRedisHandler, experiment_id: Optional[str] = None, parameters: Optional[dict[str, Any]] = None, - simulate: bool = False, ) -> Workflow: """Pulls the workcell and builds a list of dictionary steps to be executed @@ -100,15 +104,14 @@ def create_workflow( { "label": workflow_def.name, "experiment_id": experiment_id, - "simulate": simulate, - "parameter_values": parameters + "parameter_values": parameters, } ) wf = Workflow(**wf_dict) steps = [] for step in workflow_def.flowdef: - replace_positions(workcell, step) - valid, validation_string = validate_step(step, state_manager=state_manager) + replace_locations(workcell, step) + 
valid, validation_string = validate_step(step, state_handler=state_handler) print(validation_string) if not valid: raise ValueError(validation_string) @@ -118,13 +121,15 @@ def create_workflow( wf.submitted_time = datetime.now() return wf -def save_workflow_files(working_directory: str, workflow: Workflow, files: list[UploadFile]) -> Workflow: + +def save_workflow_files( + working_directory: str, workflow: Workflow, files: list[UploadFile] +) -> Workflow: """Saves the files to the workflow run directory, and updates the step files to point to the new location""" get_workflow_inputs_directory( - workflow_id=workflow.workflow_id, - working_directory=working_directory + workflow_id=workflow.workflow_id, working_directory=working_directory ).mkdir(parents=True, exist_ok=True) if files: for file in files: @@ -144,40 +149,44 @@ def save_workflow_files(working_directory: str, workflow: Workflow, files: list[ print(f"{step_file_key}: {file_path} ({step_file_path})") return workflow -def copy_workflow_files(working_directory: str, old_id: str, workflow: Workflow) -> Workflow: + +def copy_workflow_files( + working_directory: str, old_id: str, workflow: Workflow +) -> Workflow: """Saves the files to the workflow run directory, and updates the step files to point to the new location""" new = get_workflow_inputs_directory( - workflow_id=workflow.workflow_id, - working_directory=working_directory + workflow_id=workflow.workflow_id, working_directory=working_directory ) old = get_workflow_inputs_directory( - workflow_id=old_id, - working_directory=working_directory + workflow_id=old_id, working_directory=working_directory ) shutil.copytree(old, new) return workflow -def get_workflow_inputs_directory(workflow_id: str = None, working_directory: str = None) -> Path: + +def get_workflow_inputs_directory( + workflow_id: Optional[str] = None, working_directory: Optional[str] = None +) -> Path: """returns a directory name for the workflows inputs""" return Path(working_directory) / 
"Workflows" / workflow_id / "Inputs" -def cancel_workflow(wf: Workflow, state_manager: WorkcellRedisHandler) -> None: +def cancel_workflow(wf: Workflow, state_handler: WorkcellRedisHandler) -> None: """Cancels the workflow run""" wf.status = WorkflowStatus.CANCELLED - with state_manager.wc_state_lock(): - state_manager.set_workflow(wf) + with state_handler.wc_state_lock(): + state_handler.set_workflow(wf) return wf -def cancel_active_workflows(state_manager: WorkcellRedisHandler) -> None: +def cancel_active_workflows(state_handler: WorkcellRedisHandler) -> None: """Cancels all currently running workflow runs""" - for wf in state_manager.get_all_workflows().values(): + for wf in state_handler.get_all_workflows().values(): if wf.status in [ WorkflowStatus.RUNNING, WorkflowStatus.QUEUED, WorkflowStatus.IN_PROGRESS, ]: - cancel_workflow(wf, state_manager=state_manager) + cancel_workflow(wf, state_handler=state_handler) diff --git a/ruff.toml b/ruff.toml index 76e5ff9..8b31b4b 100644 --- a/ruff.toml +++ b/ruff.toml @@ -95,6 +95,7 @@ ignore = [ "B006", # Do not use mutable data structures for argument defaults "ANN401", # Don't flag Any types "COM812", # Don't fight over trailing commas + "PLR0913", # Allow for a reasonable number of arguments ] # Allow fix for all enabled rules (when `--fix`) is provided. 
diff --git a/tests/example/workflows/test_workflow.workflow.yaml b/tests/example/workflows/test_workflow.workflow.yaml index 23d6a0a..b1f7008 100644 --- a/tests/example/workflows/test_workflow.workflow.yaml +++ b/tests/example/workflows/test_workflow.workflow.yaml @@ -21,4 +21,3 @@ flowdef: command: thingy comment: Get a new plate files: {"protocol": "/workspaces/MADSci/tests/example/protocols/protocol.txt"} - diff --git a/tests/test_cli.ipynb b/tests/test_cli.ipynb index 898134c..1618942 100644 --- a/tests/test_cli.ipynb +++ b/tests/test_cli.ipynb @@ -1,144 +1,144 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import os\n", - "from pathlib import Path\n", - "\n", - "if Path.cwd().stem == \"test_lab\":\n", - " os.chdir(\"..\")\n", - "\n", - "path = Path.cwd() / \"test_lab\"\n", - "if path.exists():\n", - " print(\"Directory test_lab/ already exists, removing...\")\n", - "\n", - " def remove_children(path: Path) -> None:\n", - " \"\"\"Recursively remove all children of a directory.\"\"\"\n", - " for child in path.iterdir():\n", - " if child.is_file():\n", - " child.unlink()\n", - " else:\n", - " remove_children(child)\n", - " child.rmdir()\n", - "\n", - " remove_children(path)\n", - " if path.is_dir():\n", - " path.rmdir()\n", - "\n", - "path.mkdir()\n", - "os.chdir(path)\n", - "print(Path.cwd())" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Test creating a lab\n", - "!madsci -q lab create --name \"test_lab\"\n", - "!madsci -q lab create --name \"test_lab_2\" --description \"A test lab with a description\"\n", - "!madsci -q lab list\n", - "!madsci -q lab validate\n", - "!madsci -q lab info\n", - "!madsci -q lab --name test_lab_2 delete\n", - "!madsci -q lab list\n", - "!madsci -q lab add-command --command_name \"echo\" --command \"echo 'Hello, world!'\"\n", - "!madsci -q lab run echo\n", - "!madsci -q lab 
delete-command echo\n", - "!madsci -q lab info" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!madsci -q workcell create --name \"test_workcell\"\n", - "!madsci -q workcell create --name \"test_workcell_2\" --description \"A test workcell with a description\"\n", - "!madsci -q lab info\n", - "!madsci -q workcell list\n", - "!madsci -q workcell info\n", - "!madsci -q workcell validate\n", - "!madsci -q workcell delete\n", - "!madsci -q workcell list\n", - "!madsci -q lab info" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "!madsci -q module create --name \"test_module\"\n", - "!madsci -q module create --name \"test_module_2\" --description \"A test module with a description\"\n", - "!madsci -q lab info\n", - "!madsci -q module list\n", - "!madsci -q module info\n", - "!madsci -q module delete\n", - "!madsci -q module list\n", - "!madsci -q lab info\n", - "!madsci -q module add-command --command_name \"echo\" --command \"echo 'Hello, world!'\"\n", - "!madsci -q module run echo\n", - "!madsci -q module delete-command --command_name echo\n", - "!madsci -q module info" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# *Test CLI node commands\n", - "!madsci -q node create --name \"test_node\"\n", - "!madsci -q node create --name \"test_node_2\" --description \"A test node with a description\"\n", - "!madsci -q workcell info\n", - "!madsci -q node list\n", - "!madsci -q node info\n", - "!madsci -q node delete\n", - "!madsci -q node list\n", - "!madsci -q workcell info\n", - "!madsci -q node add-command --command_name \"echo\" --command \"echo 'Hello, world!'\"\n", - "!madsci -q node run echo\n", - "!madsci -q node delete-command echo\n", - "!madsci -q node info" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": 
[] - } - ], - "metadata": { - "kernelspec": { - "display_name": ".venv", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.2" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "from pathlib import Path\n", + "\n", + "if Path.cwd().stem == \"test_lab\":\n", + " os.chdir(\"..\")\n", + "\n", + "path = Path.cwd() / \"test_lab\"\n", + "if path.exists():\n", + " print(\"Directory test_lab/ already exists, removing...\")\n", + "\n", + " def remove_children(path: Path) -> None:\n", + " \"\"\"Recursively remove all children of a directory.\"\"\"\n", + " for child in path.iterdir():\n", + " if child.is_file():\n", + " child.unlink()\n", + " else:\n", + " remove_children(child)\n", + " child.rmdir()\n", + "\n", + " remove_children(path)\n", + " if path.is_dir():\n", + " path.rmdir()\n", + "\n", + "path.mkdir()\n", + "os.chdir(path)\n", + "print(Path.cwd())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Test creating a lab\n", + "!madsci -q lab create --name \"test_lab\"\n", + "!madsci -q lab create --name \"test_lab_2\" --description \"A test lab with a description\"\n", + "!madsci -q lab list\n", + "!madsci -q lab validate\n", + "!madsci -q lab info\n", + "!madsci -q lab --name test_lab_2 delete\n", + "!madsci -q lab list\n", + "!madsci -q lab add-command --command_name \"echo\" --command \"echo 'Hello, world!'\"\n", + "!madsci -q lab run echo\n", + "!madsci -q lab delete-command echo\n", + "!madsci -q lab info" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + 
"!madsci -q workcell create --name \"test_workcell\"\n", + "!madsci -q workcell create --name \"test_workcell_2\" --description \"A test workcell with a description\"\n", + "!madsci -q lab info\n", + "!madsci -q workcell list\n", + "!madsci -q workcell info\n", + "!madsci -q workcell validate\n", + "!madsci -q workcell delete\n", + "!madsci -q workcell list\n", + "!madsci -q lab info" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "!madsci -q module create --name \"test_module\"\n", + "!madsci -q module create --name \"test_module_2\" --description \"A test module with a description\"\n", + "!madsci -q lab info\n", + "!madsci -q module list\n", + "!madsci -q module info\n", + "!madsci -q module delete\n", + "!madsci -q module list\n", + "!madsci -q lab info\n", + "!madsci -q module add-command --command_name \"echo\" --command \"echo 'Hello, world!'\"\n", + "!madsci -q module run echo\n", + "!madsci -q module delete-command --command_name echo\n", + "!madsci -q module info" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# *Test CLI node commands\n", + "!madsci -q node create --name \"test_node\"\n", + "!madsci -q node create --name \"test_node_2\" --description \"A test node with a description\"\n", + "!madsci -q workcell info\n", + "!madsci -q node list\n", + "!madsci -q node info\n", + "!madsci -q node delete\n", + "!madsci -q node list\n", + "!madsci -q workcell info\n", + "!madsci -q node add-command --command_name \"echo\" --command \"echo 'Hello, world!'\"\n", + "!madsci -q node run echo\n", + "!madsci -q node delete-command echo\n", + "!madsci -q node info" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": 
{ + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.2" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/tests/test_module.ipynb b/tests/test_module.ipynb index 90ddf0b..7bc2a63 100644 --- a/tests/test_module.ipynb +++ b/tests/test_module.ipynb @@ -1,79 +1,79 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "from rich import print\n", - "\n", - "from madsci.client.node.rest_node_client import RestNodeClient\n", - "from madsci.common.types.action_types import ActionRequest\n", - "from madsci.common.types.node_types import Node" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "node = Node(node_url=\"http://localhost:2000\")\n", - "node_client = RestNodeClient(node)\n", - "\n", - "info = node_client.get_info()\n", - "print(info)\n", - "status = node_client.get_status()\n", - "print(status)\n", - "state = node_client.get_state()\n", - "print(state)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "response = node_client.send_action(\n", - " ActionRequest(action_name=\"run_command\", args={\"command\": \"status\"}),\n", - ")\n", - "print(response)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "action_history = node_client.get_action_history()\n", - "action_id = action_history[0]\n", - "action_response = node_client.get_action_result(action_id)\n", - "print(action_response)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": ".venv", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", 
- "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.9.2" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from rich import print\n", + "\n", + "from madsci.client.node.rest_node_client import RestNodeClient\n", + "from madsci.common.types.action_types import ActionRequest\n", + "from madsci.common.types.node_types import Node" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "node = Node(node_url=\"http://localhost:2000\")\n", + "node_client = RestNodeClient(node)\n", + "\n", + "info = node_client.get_info()\n", + "print(info)\n", + "status = node_client.get_status()\n", + "print(status)\n", + "state = node_client.get_state()\n", + "print(state)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "response = node_client.send_action(\n", + " ActionRequest(action_name=\"run_command\", args={\"command\": \"status\"}),\n", + ")\n", + "print(response)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "action_history = node_client.get_action_history()\n", + "action_id = action_history[0]\n", + "action_response = node_client.get_action_result(action_id)\n", + "print(action_response)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": ".venv", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.2" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +}