From 13de8cd42bc71d5f4a4e039395f070c72115c7c3 Mon Sep 17 00:00:00 2001 From: Rafael Vescovi <17483715+ravescovi@users.noreply.github.com> Date: Thu, 7 Mar 2024 15:54:08 -0600 Subject: [PATCH] inits --- .github/workflows/docker-build.yml | 76 +++++++++++ .github/workflows/docker-publish.yml | 74 +++++++++++ .github/workflows/pre-commit.yml | 15 +++ .github/workflows/tests.yml | 19 +++ .gitignore | 162 +++++++++++++++++++++++ .pre-commit-config.yaml | 26 ++++ CODE_OF_CONDUCT.md | 128 +++++++++++++++++++ Dockerfile | 23 ++++ LICENSE | 21 +++ Makefile | 34 +++++ compose.yaml | 19 +++ example.env | 11 ++ pyproject.toml | 137 ++++++++++++++++++++ src/mir_driver/__init__.py | 1 + src/mir_driver/mir_driver.py | 147 +++++++++++++++++++++ src/mir_rest_node.py | 170 +++++++++++++++++++++++++ tests/test_base.py | 25 ++++ tests/test_module.py | 61 +++++++++ tests/workcell_defs/test_workcell.yaml | 19 +++ tests/workflow_defs/test_workflow.yaml | 15 +++ wei.compose.yaml | 48 +++++++ 21 files changed, 1231 insertions(+) create mode 100644 .github/workflows/docker-build.yml create mode 100644 .github/workflows/docker-publish.yml create mode 100644 .github/workflows/pre-commit.yml create mode 100644 .github/workflows/tests.yml create mode 100644 .gitignore create mode 100644 .pre-commit-config.yaml create mode 100644 CODE_OF_CONDUCT.md create mode 100644 Dockerfile create mode 100644 LICENSE create mode 100644 Makefile create mode 100644 compose.yaml create mode 100644 example.env create mode 100644 pyproject.toml create mode 100644 src/mir_driver/__init__.py create mode 100644 src/mir_driver/mir_driver.py create mode 100644 src/mir_rest_node.py create mode 100644 tests/test_base.py create mode 100644 tests/test_module.py create mode 100644 tests/workcell_defs/test_workcell.yaml create mode 100644 tests/workflow_defs/test_workflow.yaml create mode 100644 wei.compose.yaml diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml new file mode 100644 index 0000000..bcc6674 --- /dev/null +++ b/.github/workflows/docker-build.yml @@ -0,0 +1,76 @@ +name: Docker Build Only + +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. 
+ +on: + push: + branches: [ "!main" ] + pull_request: + branches: [ "*" ] + + +env: + # Use docker.io for Docker Hub if empty + REGISTRY: ghcr.io + # github.repository as / + IMAGE_NAME: ${{ github.repository }} + + +jobs: + docker_build: + + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v2 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + # Login against a Docker registry except on PR + # https://github.com/docker/login-action + - name: Log into registry ${{ env.REGISTRY }} + if: github.event_name != 'pull_request' + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + # Extract metadata (tags, labels) for Docker + # https://github.com/docker/metadata-action + - name: Extract Docker metadata + id: meta + uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 # v5.0.0 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + type=schedule + type=ref,event=branch + type=ref,event=tag + type=ref,event=pr + type=raw,value=latest,enable={{is_default_branch}} + + # Build and push Docker image with Buildx (don't push on PR) + # https://github.com/docker/build-push-action + - name: Build Docker image + id: build-and-push + uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5.0.0 + with: + context: . + push: false + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + platforms: linux/amd64,linux/arm64 + #cache-from: type=gha + #cache-to: type=gha,mode=max diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml new file mode 100644 index 0000000..d26b14f --- /dev/null +++ b/.github/workflows/docker-publish.yml @@ -0,0 +1,74 @@ +name: Docker Build and Publish + +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +on: + push: + branches: [ "main" ] + # Publish semver tags as releases. 
+ tags: [ '*' ] + +env: + # Use docker.io for Docker Hub if empty + REGISTRY: ghcr.io + # github.repository as / + IMAGE_NAME: ${{ github.repository }} + + +jobs: + build_and_publish: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v2 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + # Login against a Docker registry except on PR + # https://github.com/docker/login-action + - name: Log into registry ${{ env.REGISTRY }} + if: github.event_name != 'pull_request' + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d # v3.0.0 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + # Extract metadata (tags, labels) for Docker + # https://github.com/docker/metadata-action + - name: Extract Docker metadata + id: meta + uses: docker/metadata-action@96383f45573cb7f253c731d3b3ab81c87ef81934 # v5.0.0 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + type=schedule + type=ref,event=branch + type=ref,event=tag + type=ref,event=pr + type=raw,value=latest,enable={{is_default_branch}} + + # Build and push Docker image with Buildx (don't push on PR) + # https://github.com/docker/build-push-action + - name: Build and push Docker image + id: build-and-push + uses: docker/build-push-action@0565240e2d4ab88bba5387d719585280857ece09 # v5.0.0 + with: + context: . + push: true + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + platforms: linux/amd64,linux/arm64 + #cache-from: type=gha + #cache-to: type=gha,mode=max diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml new file mode 100644 index 0000000..4d9a5b4 --- /dev/null +++ b/.github/workflows/pre-commit.yml @@ -0,0 +1,15 @@ +name: Pre-Commit Checks + +on: + push: + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + name: Checkout code + - uses: actions/setup-python@v3 + name: Setup Python + - uses: pre-commit/action@v3.0.0 + name: Run Pre-Commit Checks diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 0000000..76bd801 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,19 @@ +name: Pytest Unit Tests + +on: [push] + +jobs: + build_and_test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + name: Checkout code + - name: Test makefile + run: make init .env + - name: Check .env file + run: cat .env + - name: Create paths + run: make paths + - name: Initialize, Build, and Test with Docker + run: make test + shell: bash diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..e6ef6f2 --- /dev/null +++ b/.gitignore @@ -0,0 +1,162 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +.vscode diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..67b8598 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,26 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.5.0 + hooks: + - id: check-yaml + - id: check-json + - id: check-toml + - id: check-ast + - id: check-merge-conflict + - id: check-added-large-files + - id: mixed-line-ending + - id: end-of-file-fixer + - id: trailing-whitespace + - repo: https://github.com/kynan/nbstripout + rev: 0.7.1 + hooks: + - id: nbstripout + - repo: https://github.com/astral-sh/ruff-pre-commit + # Ruff version. + rev: v0.3.0 + hooks: + # Run the linter. + - id: ruff + args: [--fix] + # Run the formatter. 
+ - id: ruff-format diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..bce6070 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,128 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, religion, or sexual identity +and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. + +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +* Focusing on what is best not just for us as individuals, but for the + overall community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or + advances of any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email + address, without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +kyle.hippe@gmail.com. +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. 
+ +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series +of actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or +permanent ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within +the community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.0, available at +https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by [Mozilla's code of conduct +enforcement ladder](https://github.com/mozilla/diversity). + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at +https://www.contributor-covenant.org/translations. diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..f735590 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,23 @@ +FROM ghcr.io/ad-sdl/wei + +LABEL org.opencontainers.image.source=https://github.com/AD-SDL/mir_module +LABEL org.opencontainers.image.description="Drivers and REST API's for the mir plate handler robots" +LABEL org.opencontainers.image.licenses=MIT + +######################################### +# Module specific logic goes below here # +######################################### + +RUN mkdir -p mir_module + +COPY ./src mir_module/src +COPY ./README.md mir_module/README.md +COPY ./pyproject.toml mir_module/pyproject.toml +COPY ./tests mir_module/tests + +RUN --mount=type=cache,target=/root/.cache \ + pip install -e ./mir_module + +CMD ["python", "mir_module/src/mir_rest_node.py"] + +######################################### diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..1fa1d60 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022 UChicago-Argonne, LLC. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..53f3fd0 --- /dev/null +++ b/Makefile @@ -0,0 +1,34 @@ +# Python Configuration +PYPROJECT_TOML := pyproject.toml +PROJECT_VERSION := $(shell grep -oP '(?<=version = ")[^"]+' $(PYPROJECT_TOML) | head -n 1) + +.DEFAULT_GOAL := init + +.PHONY += init paths checks test hardware_test clean +init: # Do the initial configuration of the project + @test -e .env || cp example.env .env + @sed -i 's/^PROJECT_VERSION=.*/PROJECT_VERSION=$(PROJECT_VERSION)/' .env + @sed -i 's/^PROJECT_PATH=.*/PROJECT_PATH=$(shell pwd | sed 's/\//\\\//g')/' .env + +.env: init + +paths: .env # Create the necessary data directories + @mkdir -p $(shell grep -E '^WEI_DATA_DIR=' .env | cut -d '=' -f 2) + @mkdir -p $(shell grep -E '^REDIS_DIR=' .env | cut -d '=' -f 2) + +checks: # Runs all the pre-commit checks + @pre-commit install + @pre-commit run --all-files || { echo "Checking fixes\n" ; pre-commit run --all-files; } + +test: init .env paths # Runs all the tests + @docker compose -f wei.compose.yaml --env-file .env up --build -d + @docker compose -f wei.compose.yaml --env-file .env exec mir_module pytest -p no:cacheprovider -m "not hardware" mir_module + @docker compose -f wei.compose.yaml --env-file .env down + +# hardware_test: init .env paths # Runs all the tests +# @docker compose -f wei.compose.yaml --env-file .env up --build -d +# @docker compose -f wei.compose.yaml --env-file .env exec mir_module pytest -p no:cacheprovider -m "hardware" mir_module +# @docker compose -f wei.compose.yaml --env-file .env down + +clean: + @rm .env diff --git a/compose.yaml b/compose.yaml new file mode 100644 index 0000000..bd7ad2f --- /dev/null +++ b/compose.yaml @@ -0,0 +1,19 @@ +name: mir_module +services: + mir_module: + container_name: mir_module + image: ${IMAGE} + build: + context: . 
+ dockerfile: Dockerfile + tags: + - ${IMAGE}:latest + - ${IMAGE}:${PROJECT_VERSION} + - ${IMAGE}:dev + command: python -m mir_rest_node --port 3000 --alias ${MIR_ALIAS} --mir_url ${MIR_URL} --mir_key ${MIR_KEY} + env_file: .env + volumes: + - ./src:/home/app/mir_module/src + - ./tests:/home/app/mir_module/tests + ports: + - 3000:3000 diff --git a/example.env b/example.env new file mode 100644 index 0000000..31d2bfc --- /dev/null +++ b/example.env @@ -0,0 +1,11 @@ +# Note: all paths are relative to the docker compose file +MIR_ALIAS="mir250_base_1" +MIR_URL="146.137.240.35" +MIR_KEY=10100 +PROJECT_PATH= +PROJECT_VERSION= +WEI_DATA_DIR=~/.wei +WORKCELL_FILENAME=test_workcell.yaml +WORKCELLS_DIR=${PROJECT_PATH}/tests/workcell_defs +IMAGE=ghcr.io/ad-sdl/mir_module +REDIS_DIR=~/.wei/redis diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..4156ea0 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,137 @@ +[project] +name = "mir_module" +version = "0.2.0" +description = "Software for automatting a mir robot" +authors = [ + {name = "Ryan D. Lewis", email="ryan.lewis@anl.gov"}, + {name = "Rafael Vescovi", email="ravescovi@anl.gov"}, +] +dependencies = [ + "ad_sdl.wei>=0.5.3", + "fastapi>=0.103.2", + "uvicorn>=0.21.1", + "pytest", +] +requires-python = ">=3.8.1" +readme = "README.md" +license = {text = "MIT"} + +[project.urls] +homepage = "https://github.com/AD-SDL/mir_module" + +###################### +# Build Info + Tools # +###################### +[build-system] +requires = ["setuptools>=61", "wheel"] +build-backend = "setuptools.build_meta" + +##################### +# Development Tools # +##################### + +[tool.ruff] +# https://docs.astral.sh/ruff/configuration/ + +# Exclude a variety of commonly ignored directories. +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".mypy_cache", + ".nox", + ".pants.d", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "venv", + "docs", +] + +# Same as Black. +line-length = 88 +indent-width = 4 + +# Assume Python 3.8 +target-version = "py38" + +[tool.ruff.lint] +# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default. +select = [ + # pycodestyle + "E", + # Pyflakes + "F", + # pyupgrade + # "UP", + # flake8-bugbear + "B", + # flake8-simplify + # "SIM", + # isort + "I", + # Warning + "W", + # pydocstyle + "D100", "D101", "D102", "D103", "D104", "D105", "D106", "D107", + # ruff + # "RUF" +] +ignore = [ + "E501", # Line too long + "B006", # Do not use mutable data structures for argument defaults +] + +# Allow fix for all enabled rules (when `--fix`) is provided. +fixable = ["ALL"] +unfixable = [] + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +[tool.ruff.format] +# Like Black, use double quotes for strings. +quote-style = "double" + +# Like Black, indent with spaces, rather than tabs. +indent-style = "space" + +# Like Black, respect magic trailing commas. +skip-magic-trailing-comma = false + +# Like Black, automatically detect the appropriate line ending. 
+line-ending = "auto"
+
+[tool.pytest.ini_options]
+# https://docs.pytest.org/en/stable/customize.html
+addopts = "-x"
+junit_family="xunit1"
+filterwarnings = [
+    "ignore::DeprecationWarning",
+    "ignore::pottery.exceptions.InefficientAccessWarning",
+]
+markers = [
+    "hardware: marks test as requiring hardware (deselect with '-m \"not hardware\"')",
+]
+
+[tool.mypy]
+# https://mypy.readthedocs.io/en/stable/config_file.html#using-a-pyproject-toml
+show_error_codes = true
+check_untyped_defs = true
+follow_imports = "normal"
+strict_optional = true
+plugins = ["pydantic.mypy"]
+strict = true
+disallow_untyped_defs = true
+implicit_reexport = true
diff --git a/src/mir_driver/__init__.py b/src/mir_driver/__init__.py
new file mode 100644
index 0000000..c82fbfd
--- /dev/null
+++ b/src/mir_driver/__init__.py
@@ -0,0 +1 @@
+"""Python driver for controlling the MiR base over HTTP"""
diff --git a/src/mir_driver/mir_driver.py b/src/mir_driver/mir_driver.py
new file mode 100644
index 0000000..74a5e2a
--- /dev/null
+++ b/src/mir_driver/mir_driver.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python3
+"""Driver code for the MiR mobile robot base."""
+
+
+import json
+
+import requests
+from pprint import pprint
+
+
+class MiR_Base:
+    """Main Driver Class for the MiR Robotic base."""
+
+    def __init__(
+        self,
+        mir_ip="mirbase2.cels.anl.gov",
+        mir_key="Basic RGlzdHJpYnV0b3I6NjJmMmYwZjFlZmYxMGQzMTUyYzk1ZjZmMDU5NjU3NmU0ODJiYjhlNDQ4MDY0MzNmNGNmOTI5NzkyODM0YjAxNA==",
+        map_name=None,
+    ):
+        """Connect to the MiR base REST API and look up the active map."""
+        self.mir_ip = mir_ip
+        self.mir_key = mir_key
+        self.host = "http://" + self.mir_ip + "/api/v2.0.0/"
+        # format the headers
+        self.headers = {}
+        self.headers["Content-Type"] = "application/json"
+        self.headers["Authorization"] = self.mir_key
+
+        self.map_name = map_name
+        self.current_map = self.get_map()
+        self.map_guid = self.current_map["guid"]
+
+    def get_map(self):
+        """Return the map matching map_name, or the first map reported by the robot."""
+        get_maps = requests.get(self.host + "maps", headers=self.headers)
+        maps = json.loads(get_maps.text)
+        current_map = []
+        if not self.map_name:
+            print("No map_name given, using the first map")
+        else:
+            ## TODO: warn explicitly when the named map is not found (currently falls back to the first map)
+            current_map = list(filter(lambda map: map["name"] == self.map_name, maps))
+        if not current_map:
+            current_map = maps
+        print("Current Map:", current_map[0])
+        return current_map[0]
+
+    def get_positions(self):
+        """Return (and pretty-print) all positions defined on the current map."""
+        get_positions_by_map = requests.get(
+            self.host + "maps/" + self.current_map["guid"] + "/positions",
+            headers=self.headers,
+        )
+        positions = json.loads(get_positions_by_map.text)
+        pprint(positions)
+        return positions
+
+    # def get_actions(self):
+    #     get_actions = requests.get(
+    #         self.host + "actions",
+    #         headers=self.headers,
+    #     )
+    #     all_actions = json.loads(get_actions.text)
+    #     pprint(all_actions)
+    #     dets = requests.get(self.host + "actions/move", headers=self.headers)
+    #     pprint(json.loads(dets.text))
+    #     return all_actions
+
+    def goto_positions(self, position=None):
+        """Send the robot to a named position (not yet implemented)."""
+        pass
+
+    def list_missions(self):
+        """Return all missions defined on the robot."""
+        get_missions = requests.get(self.host + "missions", headers=self.headers)
+        all_missions = json.loads(get_missions.text)
+        return all_missions
+
+    def run_mission(self):
+        """Run a mission (not yet implemented)."""
+        pass
+
+    def post_mission(self, mission_name="", mission_vars=[]):
+        """Post a mission to the robot's mission queue.
+
+        Arguments:
+            mission_name: the mission name as set in the MiR web interface
+            mission_vars: optional list of parameter overrides for the mission
+        """
+        all_missions = self.list_missions()
+        for i in range(len(all_missions)):
+            # print(all_missions[i]["name"])
+            if all_missions[i]["name"] == mission_name:
+                mission_id_temp = all_missions[i]["guid"]
+        dets = requests.get(
+            self.host + "missions/" + mission_id_temp + "/actions",
+            headers=self.headers,
+        )
+        pprint(json.loads(dets.text))
+
+        dets = requests.get(
+            self.host + "missions/" + mission_id_temp, headers=self.headers
+        )
+        pprint(json.loads(dets.text))
+
+        mission_json = {"mission_id": mission_id_temp, "parameters": mission_vars}
+        mission = requests.post(
+            self.host + "mission_queue", json=mission_json, headers=self.headers
+        )
+        print(mission.text)
+
+    def delete_mission(self):
+        """Delete every mission currently in the mission queue."""
+        return requests.delete(self.host + "mission_queue", headers=self.headers)
+
+    def check_completion(self):
+        """Poll the mission queue until the most recent mission reports the 'Done' state."""
+        status = False
+        # NOTE: this polls the REST API in a tight loop; callers may want a delay between checks
+        while status is False:
+            check_mission_status = requests.get(
+                self.host + "mission_queue", headers=self.headers
+            )
+            response_native = json.loads(check_mission_status.text)
+            status_string = response_native[-1]["state"]
+            if status_string == "Done":
+                status = True
+            else:
+                status = False
+
+
+if __name__ == "__main__":
+    mir_base = MiR_Base(map_name="RPL")
+    # pprint(mir_base.list_missions())
+    ### mir_base.get_actions()
+    # mir_base.get_positions()
+    mir_base.post_mission(mission_name="DockCharger1")
+    # mir_base.post_mission(mission_name="GoToCamera")
+    # mir_base.post_mission(mission_name="Move")
+    # mir_base.post_mission(
+    #     mission_name="GoToPositionPrototype",
+    #     mission_vars=[
+    #         {
+    #             "x": 19.5,
+    #             "y": 19.5,
+    #             "orientation": 90.0,
+    #             # "retries": 10,
+    #             # "distance_threshold": 0.25,
+    #         }
+    #     ],
+    # )
+# https://mirbase2.cels.anl.gov/?x=10.1&y=19.5&orientation=-90.00&mode=map-go-to-coordinates
diff --git a/src/mir_rest_node.py b/src/mir_rest_node.py
new file mode 100644
index 0000000..c372820
--- /dev/null
+++ b/src/mir_rest_node.py
@@ -0,0 +1,170 @@
+#!
/usr/bin/env python3 +"""The server for the PF400 robot that takes incoming WEI flow requests from the experiment application""" + +import datetime +import json +import traceback +from argparse import ArgumentParser, Namespace +from contextlib import asynccontextmanager +from pathlib import Path +from time import sleep + +from fastapi import FastAPI +from fastapi.responses import JSONResponse +from wei.core.data_classes import ( + ModuleAbout, + ModuleAction, + ModuleActionArg, +) +from wei.helpers import extract_version + + +def parse_args() -> Namespace: + """Parses the command line arguments for the PF400 REST node""" + parser = ArgumentParser() + parser.add_argument("--alias", type=str, help="Name of the Node", default="pf400") + parser.add_argument("--host", type=str, help="Host for rest", default="0.0.0.0") + parser.add_argument("--port", type=int, help="port value") + return parser.parse_args() + + +global pf400_ip, pf400_port, state, action_start + + +@asynccontextmanager +async def lifespan(app: FastAPI): + """Initial run function for the app, parses the workcell argument + Parameters + ---------- + app : FastApi + The REST API app being initialized + + Returns + ------- + None""" + global pf400, state, pf400_ip, pf400_port + + args = parse_args() + pf400_ip = args.pf400_ip + pf400_port = args.pf400_port + + try: + pf400 = PF400(pf400_ip, pf400_port) + pf400.initialize_robot() + state = "IDLE" + except Exception: + state = "ERROR" + traceback.print_exc() + else: + print("PF400 online") + yield + + # Do any cleanup here + pass + + +app = FastAPI( + lifespan=lifespan, +) + + +def check_state(): + """Updates the MiR state + + Parameters: + ----------- + None + Returns + ------- + None + """ + pass + + +@app.get("/state") +def state(): + """Returns the current state of the Pf400 module""" + global state, action_start + if not (state == "BUSY") or ( + action_start + and (datetime.datetime.now() - action_start > datetime.timedelta(0, 2)) + ): + check_state() + return JSONResponse(content={"State": state}) + + +@app.get("/resources") +async def resources(): + """Returns info about the resources the module has access to""" + global pf400 + return JSONResponse(content={"State": pf400.get_status()}) + + +@app.get("/about") +async def about() -> JSONResponse: + """Returns a description of the actions and resources the module supports""" + global state + about = ModuleAbout( + name="Pf400 Robotic Arm", + model="Precise Automation PF400", + description="pf400 is a robot module that moves plates between two robot locations.", + interface="wei_rest_node", + version=extract_version(Path(__file__).parent.parent / "pyproject.toml"), + actions=[ + ModuleAction( + name="transfer", + description="This action transfers a plate from a source robot location to a target robot location.", + args=[ + ModuleActionArg( + name="source", + description="Source location in the workcell for pf400 to grab plate from.", + type="str", + required=True, + ), + ], + ), + ], + resource_pools=[], + ) + return JSONResponse(content=about.model_dump(mode="json")) + + +@app.post("/action") +def do_action(action_handle: str, action_vars: str): + """Executes the action requested by the user""" + response = {"action_response": "", "action_msg": "", "action_log": ""} + print(action_vars) + global pf400, state, action_start + if state == "BUSY": + return + action_start = datetime.datetime.now() + if state == "PF400 CONNECTION ERROR": + response["action_response"] = "failed" + response["action_log"] = "Connection error, cannot 
accept a job!" + return response + + vars = json.loads(action_vars) + + err = False + state = "BUSY" + if action_handle == "mission": + print("test_mission") + else: + msg = "UNKNOWN ACTION REQUEST! Available actions: mission" + response["action_response"] = "failed" + response["action_log"] = msg + return response + + +if __name__ == "__main__": + import uvicorn + + args = parse_args() + + uvicorn.run( + "mirbase_rest_node:app", + host=args.host, + port=args.port, + reload=True, + ws_max_size=100000000000000000000000000000000000000, + ) diff --git a/tests/test_base.py b/tests/test_base.py new file mode 100644 index 0000000..daaddda --- /dev/null +++ b/tests/test_base.py @@ -0,0 +1,25 @@ +"""Base module tests.""" + +import unittest + + +class TestModule_Base(unittest.TestCase): + """Base test class for this module.""" + + pass + + +class TestImports(TestModule_Base): + """Test the imports of the module are working correctly""" + + def test_driver_import(self): + """Test the driver and rest node imports""" + import mir_driver + import mir_rest_node + + assert mir_driver + assert mir_rest_node + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_module.py b/tests/test_module.py new file mode 100644 index 0000000..db0dd40 --- /dev/null +++ b/tests/test_module.py @@ -0,0 +1,61 @@ +"""Tests the basic functionality of the Module.""" + +import time +import unittest +from pathlib import Path + +import requests +from wei.core.data_classes import ModuleAbout, WorkcellData + + +class TestWEI_Base(unittest.TestCase): + """Base class for WEI's pytest tests""" + + def __init__(self, *args, **kwargs): + """Basic setup for WEI's pytest tests""" + super().__init__(*args, **kwargs) + self.root_dir = Path(__file__).resolve().parent.parent + self.workcell_file = self.root_dir / Path( + "tests/workcell_defs/test_workcell.yaml" + ) + self.workcell = WorkcellData.from_yaml(self.workcell_file) + self.server_host = self.workcell.config.server_host + self.server_port = self.workcell.config.server_port + self.url = f"http://{self.server_host}:{self.server_port}" + self.module_url = "http://mir_module:3000" + self.redis_host = self.workcell.config.redis_host + + # Check to see that server is up + start_time = time.time() + while True: + try: + if requests.get(self.url + "/wc/state").status_code == 200: + break + except Exception: + pass + time.sleep(1) + if time.time() - start_time > 60: + raise TimeoutError("Server did not start in 60 seconds") + while True: + try: + if requests.get(self.module_url + "/state").status_code == 200: + break + except Exception: + pass + time.sleep(1) + if time.time() - start_time > 60: + raise TimeoutError("Module did not start in 60 seconds") + + +class TestModuleInterfaces(TestWEI_Base): + """Tests the basic functionality of the Module.""" + + def test_module_about(self): + """Tests that the module's /about endpoint works""" + response = requests.get(self.module_url + "/about") + assert response.status_code == 200 + ModuleAbout(**response.json()) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/workcell_defs/test_workcell.yaml b/tests/workcell_defs/test_workcell.yaml new file mode 100644 index 0000000..c7feb7b --- /dev/null +++ b/tests/workcell_defs/test_workcell.yaml @@ -0,0 +1,19 @@ +name: Test_Workcell + +#Info about data processing and location of the workcell +config: + workcell_origin_coordinates: [0, 0, 0, 0, 0, 0] + redis_host: "wei_redis" + server_host: "wei_server" + server_port: 8000 + +#List of all components accessible in this 
workcell +modules: + + - name: mir_module + model: MiR 250 + interface: wei_rest_node + config: + rest_node_address: "http://mir_module:3000" + workcell_coordinates: [0, 0, 0, 0, 0, 0] +locations: {} diff --git a/tests/workflow_defs/test_workflow.yaml b/tests/workflow_defs/test_workflow.yaml new file mode 100644 index 0000000..223d7e0 --- /dev/null +++ b/tests/workflow_defs/test_workflow.yaml @@ -0,0 +1,15 @@ +name: Test mir_module Module +metadata: + author: Doga Ozgulbas + info: Tests the functionality of the mir Module + version: 0.1 + +modules: + - name: mir_module + +flowdef: + - name: Transfer + module: mir_module + action: action + args: + action_id: 'action1' \ No newline at end of file diff --git a/wei.compose.yaml b/wei.compose.yaml new file mode 100644 index 0000000..c00725f --- /dev/null +++ b/wei.compose.yaml @@ -0,0 +1,48 @@ +include: [compose.yaml] +##################### +# WEI Core Services # +##################### +services: + wei_server: + image: ghcr.io/ad-sdl/wei + container_name: wei_server + ports: + - 8000:8000 + env_file: .env + environment: + - PYTHONUNBUFFERED=1 # Fix weird bug with empty logging + volumes: + - ${WORKCELLS_DIR}:/workcell_defs + - ${WEI_DATA_DIR}:/home/app/.wei + - diaspora_config:/home/app/.diaspora + command: python3 -m wei.server --workcell /workcell_defs/${WORKCELL_FILENAME} + depends_on: + - wei_redis + wei_engine: + image: ghcr.io/ad-sdl/wei + container_name: wei_engine + volumes: + - ${WORKCELLS_DIR}:/workcell_defs + - ${WEI_DATA_DIR}:/home/app/.wei + env_file: .env + environment: + - PYTHONUNBUFFERED=1 # Fix weird bug with empty logging + command: python3 -m wei.engine --workcell /workcell_defs/${WORKCELL_FILENAME} + depends_on: + - wei_redis + - wei_server + wei_redis: + image: redis + container_name: wei_redis + ports: + - 6379:6379 + volumes: + - ${REDIS_DIR}:/data + command: redis-server --save 60 1 --loglevel warning + +################ +# Data Storage # +################ +volumes: + diaspora_config: + driver: local
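A minimal usage sketch for the MiR_Base driver added in src/mir_driver/mir_driver.py. The robot address, Authorization value, map name, and mission name below are placeholders that must match what is configured in the MiR web interface.

```python
from mir_driver.mir_driver import MiR_Base

# Placeholder connection details; substitute the robot's hostname/IP and the
# "Basic ..." Authorization value, and use a map name that exists on the robot.
mir_base = MiR_Base(
    mir_ip="mir-base.example.org",
    mir_key="Basic <base64-credentials>",
    map_name="RPL",
)

# List the missions defined on the robot, then queue one of them by name.
for mission in mir_base.list_missions():
    print(mission["name"], mission["guid"])

mir_base.post_mission(mission_name="DockCharger1")

# Block until the last mission in the queue reports the "Done" state.
mir_base.check_completion()
```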
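Under the hood the driver talks to the MiR REST API rooted at http://&lt;robot&gt;/api/v2.0.0/, sending a JSON Content-Type and a pre-built "Basic ..." Authorization header on every request. The sketch below mirrors the mission-queue polling that check_completion() performs, with an explicit delay added between requests; the host and credentials are placeholders.

```python
import json
import time

import requests

host = "http://mir-base.example.org/api/v2.0.0/"  # placeholder robot address
headers = {
    "Content-Type": "application/json",
    "Authorization": "Basic <base64-credentials>",  # placeholder key
}

# Poll the mission queue until the most recently queued mission reports "Done".
while True:
    queue = json.loads(requests.get(host + "mission_queue", headers=headers).text)
    if queue and queue[-1]["state"] == "Done":
        break
    time.sleep(2)  # avoid hammering the robot's REST API
```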
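The FastAPI node in src/mir_rest_node.py exposes /state, /resources, /about, and /action; an action is requested via an action_handle plus a JSON string of action_vars query parameters, and "mission" is the only handle the node recognizes. As committed, the node still carries PF400 startup logic, so treat this as a sketch of the intended request shapes rather than something runnable against this exact revision; the hostname, port, and the mission_name argument are assumptions.

```python
import json

import requests

MODULE_URL = "http://localhost:3000"  # assumes the port mapping from compose.yaml

# Module status and self-description.
print(requests.get(MODULE_URL + "/state").json())
print(requests.get(MODULE_URL + "/about").json())

# Request an action: handle plus a JSON-encoded argument string as query parameters.
response = requests.post(
    MODULE_URL + "/action",
    params={
        "action_handle": "mission",
        "action_vars": json.dumps({"mission_name": "DockCharger1"}),  # hypothetical argument
    },
)
print(response.status_code)
```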
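The tests load the workcell definition through WEI's WorkcellData data class; below is a short sketch of doing the same outside pytest, using only the fields that tests/test_module.py itself relies on (the path is relative to the repository root).

```python
from pathlib import Path

from wei.core.data_classes import WorkcellData

# Load the same workcell definition the tests use and read back its config.
workcell = WorkcellData.from_yaml(Path("tests/workcell_defs/test_workcell.yaml"))
print(workcell.config.server_host, workcell.config.server_port, workcell.config.redis_host)
```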