diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..dc0fcfe
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,161 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+venv*/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can ignore the entire .idea folder, as done below.
+.idea/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 0000000..a572649
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,30 @@
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v2.3.0
+    hooks:
+      - id: check-yaml
+      - id: end-of-file-fixer
+      - id: trailing-whitespace
+  - repo: https://github.com/psf/black
+    rev: 23.12.0
+    hooks:
+      - id: black
+  - repo: https://github.com/charliermarsh/ruff-pre-commit
+    rev: 'v0.1.8'
+    hooks:
+      - id: ruff
+  - repo: https://github.com/jazzband/pip-tools
+    rev: 7.3.0
+    hooks:
+      - id: pip-compile
+        name: pip-compile requirements.txt
+        args: [--strip-extras, --output-file=requirements.txt]
+        files: ^(pyproject\.toml|requirements\.txt)$
+      - id: pip-compile
+        name: pip-compile requirements-test.txt
+        args: [--extra=test, --strip-extras, --output-file=requirements-test.txt]
+        files: ^(pyproject\.toml|requirements-test\.txt)$
+      - id: pip-compile
+        name: pip-compile requirements-dev.txt
+        args: [--extra=dev, --strip-extras, --output-file=requirements-dev.txt]
+        files: ^(pyproject\.toml|requirements-dev\.txt)$
diff --git a/Dockerfile b/Dockerfile
index 6805499..2b3f2dc 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -8,6 +8,13 @@ ENV PYTHONUNBUFFERED 1
 RUN addgroup -S app && adduser -S app -G app
 WORKDIR /home/app
 
+# Install requirements to build aiokafka
+RUN apk add --no-cache \
+    gcc \
+    python3-dev \
+    libc-dev \
+    zlib-dev
+
 # Copy and install requirements only first to cache the dependency layer
 COPY --chown=app:app requirements.txt .
 RUN pip install --no-cache-dir --no-compile --upgrade -r requirements.txt
diff --git a/app.py b/app.py
index f9daf03..09e04c1 100755
--- a/app.py
+++ b/app.py
@@ -21,8 +21,12 @@ from endpoints import AsyncRequestHandler as RequestHandler
 
 # TODO: for testing, add better defaults (or remove completely to make sure it is set in env)
-ENDPOINT_CONFIG_URL = os.getenv("ENDPOINT_CONFIG_URL", "http://127.0.0.1:8000/api/v1/hosts/localhost/")
-DEVICE_REGISTRY_TOKEN = os.getenv("DEVICE_REGISTRY_TOKEN", "abcdef1234567890abcdef1234567890abcdef12")
+ENDPOINT_CONFIG_URL = os.getenv(
+    "ENDPOINT_CONFIG_URL", "http://127.0.0.1:8000/api/v1/hosts/localhost/"
+)
+DEVICE_REGISTRY_TOKEN = os.getenv(
+    "DEVICE_REGISTRY_TOKEN", "abcdef1234567890abcdef1234567890abcdef12"
+)
 
 device_registry_request_headers = {
     "Authorization": f"Token {DEVICE_REGISTRY_TOKEN}",
@@ -44,22 +48,34 @@ async def get_endpoints_from_device_registry(fail_on_error: bool) -> dict:
     # Create request to ENDPOINTS_URL and get data using httpx
     async with httpx.AsyncClient() as client:
         try:
-            response = await client.get(ENDPOINT_CONFIG_URL, headers=device_registry_request_headers)
+            response = await client.get(
+                ENDPOINT_CONFIG_URL, headers=device_registry_request_headers
+            )
             if response.status_code == 200:
                 data = response.json()
-                logging.info(f"Got {len(data['endpoints'])} endpoints from device registry {ENDPOINT_CONFIG_URL}")
+                logging.info(
+                    f"Got {len(data['endpoints'])} endpoints from device registry {ENDPOINT_CONFIG_URL}"
+                )
             else:
-                logging.error(f"Failed to get endpoints from device registry {ENDPOINT_CONFIG_URL}")
+                logging.error(
+                    f"Failed to get endpoints from device registry {ENDPOINT_CONFIG_URL}"
+                )
                 return endpoints
         except Exception as e:
-            logging.error(f"Failed to get endpoints from device registry {ENDPOINT_CONFIG_URL}: {e}")
+            logging.error(
+                f"Failed to get endpoints from device registry {ENDPOINT_CONFIG_URL}: {e}"
+            )
             if fail_on_error:
                 raise e
     for endpoint in data["endpoints"]:
         # Import requesthandler module. It must exist in the Python path.
         try:
-            request_handler_module = importlib.import_module(endpoint["http_request_handler"])
-            request_handler_function: RequestHandler = request_handler_module.RequestHandler()
+            request_handler_module = importlib.import_module(
+                endpoint["http_request_handler"]
+            )
+            request_handler_function: RequestHandler = (
+                request_handler_module.RequestHandler()
+            )
             endpoint["request_handler"] = request_handler_function
             logging.info(f"Imported {endpoint['http_request_handler']}")
         except ImportError as e:
@@ -97,18 +113,22 @@ async def trigger_error(_request: Request) -> Response:
 
 
 async def api_v2(request: Request, endpoint: dict) -> Response:
-    request_data = await extract_data_from_starlette_request(request)  # data validation done here
+    request_data = await extract_data_from_starlette_request(
+        request
+    )  # data validation done here
     # TODO : remove # DONE
     # logging.error(request_data)
     if request_data.get("extra"):
         logging.warning(f"RequestModel contains extra values: {request_data['extra']}")
     if request_data["request"].get("extra"):
-        logging.warning(f"RequestData contains extra values: {request_data['request']['extra']}")
+        logging.warning(
+            f"RequestData contains extra values: {request_data['request']['extra']}"
+        )
     path = request_data["path"]
-    (auth_ok, device_id, topic_name, response_message, status_code) = await endpoint["request_handler"].process_request(
-        request_data, endpoint
-    )
+    (auth_ok, device_id, topic_name, response_message, status_code) = await endpoint[
+        "request_handler"
+    ].process_request(request_data, endpoint)
     response_message = str(response_message)
     print("REMOVE ME", auth_ok, device_id, topic_name, response_message, status_code)
     # add extracted device id to request data before pushing to kafka raw data topic
@@ -130,7 +150,10 @@ async def api_v2(request: Request, endpoint: dict) -> Response:
                 f'Failed to send "{path}" data to {topic_name}, producer was not initialised even though we had a topic name'
             )
             # Endpoint process has failed and no data was sent to Kafka. This is a fatal error.
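Note on the handler contract exercised above: api_v2() unpacks the five-tuple (auth_ok, device_id, topic_name, response_message, status_code) from process_request(), and the module named in endpoint["http_request_handler"] is imported dynamically at startup via importlib. A minimal sketch of a conforming handler follows; the module path and the "device" query parameter are illustrative assumptions, not code from this repo:

```python
# endpoints/custom/example.py -- hypothetical module path; app.py would load it
# with importlib.import_module(endpoint["http_request_handler"]) and instantiate
# its RequestHandler class, exactly as in the startup code above.
from typing import Tuple, Union


class RequestHandler:
    async def process_request(
        self,
        request_data: dict,
        endpoint_data: dict,
    ) -> Tuple[bool, str, Union[str, None], Union[str, dict, list], int]:
        # Reject the request unless the shared token matches (the same x-api-key
        # convention used by endpoints/default/apikeyauth.py in this diff).
        api_key = request_data["request"]["headers"].get("x-api-key")
        if api_key is None or api_key != endpoint_data["auth_token"]:
            return False, "", None, "Missing or invalid authentication token", 401
        # "device" is an assumed query parameter, used only for this sketch.
        device_id = request_data["request"]["get"].get("device", "")
        # Returning a topic name is what makes api_v2() push the data to Kafka.
        return True, device_id, endpoint_data["kafka_raw_data_topic"], "Request OK", 202
```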
-            response_message, status_code = "Internal server error, see logs for details", 500
+            response_message, status_code = (
+                "Internal server error, see logs for details",
+                500,
+            )
     else:
         logging.info("No action: topic_name is not defined")
@@ -165,7 +188,11 @@ async def startup():
     except Exception as e:
         logging.error(f"Failed to create KafkaProducer: {e}")
         app.producer = None
-    logging.info("Ready to go, listening to endpoints: {}".format(", ".join(app.endpoints.keys())))
+    logging.info(
+        "Ready to go, listening to endpoints: {}".format(
+            ", ".join(app.endpoints.keys())
+        )
+    )
 
 
 async def shutdown():
@@ -184,7 +211,11 @@ async def shutdown():
     APIRoute("/readiness", endpoint=readiness, methods=["GET", "HEAD"]),
     APIRoute("/healthz", endpoint=healthz, methods=["GET", "HEAD"]),
     APIRoute("/debug-sentry", endpoint=trigger_error, methods=["GET", "HEAD"]),
-    APIRoute("/{full_path:path}", endpoint=catch_all, methods=["HEAD", "GET", "POST", "PUT", "PATCH", "DELETE"]),
+    APIRoute(
+        "/{full_path:path}",
+        endpoint=catch_all,
+        methods=["HEAD", "GET", "POST", "PUT", "PATCH", "DELETE"],
+    ),
 ]
diff --git a/azure-build-main.yml b/azure-build-main.yml
index cfbd45a..85d32af 100644
--- a/azure-build-main.yml
+++ b/azure-build-main.yml
@@ -1,4 +1,4 @@
-# Continuous integration (CI) triggers cause a pipeline to run whenever you push 
+# Continuous integration (CI) triggers cause a pipeline to run whenever you push
 # an update to the specified branches or you push specified tags.
 trigger:
   batch: true
@@ -9,15 +9,15 @@ trigger:
     exclude:
       - README.md
 
-# Pull request (PR) triggers cause a pipeline to run whenever a pull request is 
-# opened with one of the specified target branches, or when updates are made to 
+# Pull request (PR) triggers cause a pipeline to run whenever a pull request is
+# opened with one of the specified target branches, or when updates are made to
 # such a pull request.
 #
-# GitHub creates a new ref when a pull request is created. The ref points to a 
-# merge commit, which is the merged code between the source and target branches 
+# GitHub creates a new ref when a pull request is created. The ref points to a
+# merge commit, which is the merged code between the source and target branches
 # of the pull request.
 #
-# Opt out of pull request validation 
+# Opt out of pull request validation
 pr: none
 
 # By default, use self-hosted agents
diff --git a/endpoints/__init__.py b/endpoints/__init__.py
index ed949f1..f9f4d66 100644
--- a/endpoints/__init__.py
+++ b/endpoints/__init__.py
@@ -28,12 +28,17 @@ def log_match(header: str, ip: str, allowed_network):
     r_ip = request_data["request"]["headers"].get("x-real-ip")
     if (
         r_ip
-        and ipaddress.ip_address(request_data["request"]["headers"].get("x-real-ip", "")) in allowed_network
+        and ipaddress.ip_address(
+            request_data["request"]["headers"].get("x-real-ip", "")
+        )
+        in allowed_network
     ):
         log_match("x-real-ip", r_ip, allowed_network)
         return True
-    forwarded_for_ips = request_data["request"]["headers"].get("x-forwarded-for", "").split(",")
+    forwarded_for_ips = (
+        request_data["request"]["headers"].get("x-forwarded-for", "").split(",")
+    )
     for r_ip in forwarded_for_ips:
         r_ip = r_ip.strip()
         if a_ip:
@@ -73,14 +78,20 @@ async def validate(
     api_key = request_data["request"]["headers"].get("x-api-key")
     if api_key is None or api_key != endpoint_data["auth_token"]:
         logging.warning("Missing or invalid authentication token (x-api-key)")
-        return False, "Missing or invalid authentication token, see logs for error", 401
+        return (
+            False,
+            "Missing or invalid authentication token, see logs for error",
+            401,
+        )
     logging.info("Authentication token validated")
     if request_data["request"]["get"].get("test") == "true":
         logging.info("Test ok")
         return False, "Test OK", 400
     allowed_ip_addresses = endpoint_data.get("allowed_ip_addresses", "")
     if allowed_ip_addresses == "":
-        logging.warning("Set 'allowed_ip_addresses' in endpoint settings to restrict requests unknown sources")
+        logging.warning(
+            "Set 'allowed_ip_addresses' in endpoint settings to restrict requests from unknown sources"
+        )
     else:
         if is_ip_address_allowed(request_data, allowed_ip_addresses) is False:
             return False, "IP address not allowed", 403
diff --git a/endpoints/default/apikeyauth.py b/endpoints/default/apikeyauth.py
index 97c8954..5d88fd7 100644
--- a/endpoints/default/apikeyauth.py
+++ b/endpoints/default/apikeyauth.py
@@ -27,7 +27,9 @@
         Just do minimal validation for request_data and return ok if token was valid.
""" - auth_ok, response_message, status_code = await self.validate(request_data, endpoint_data) + auth_ok, response_message, status_code = await self.validate( + request_data, endpoint_data + ) if auth_ok: topic_name = endpoint_data["kafka_raw_data_topic"] response_message = "Request OK" diff --git a/endpoints/digita/aiothingpark.py b/endpoints/digita/aiothingpark.py index 51fa467..c9590bf 100644 --- a/endpoints/digita/aiothingpark.py +++ b/endpoints/digita/aiothingpark.py @@ -16,7 +16,9 @@ async def validate( :param endpoint_data: endpoint data from device registry :return: (bool ok, str error text, int status code) """ - [status_ok, response_message, status_code] = await super().validate(request_data, endpoint_data) + [status_ok, response_message, status_code] = await super().validate( + request_data, endpoint_data + ) if status_ok is False: return False, response_message, status_code @@ -32,13 +34,17 @@ async def process_request( request_data: dict, endpoint_data: dict, ) -> Tuple[bool, str, Union[str, None], Union[str, dict, list], int]: - auth_ok, response_message, status_code = await self.validate(request_data, endpoint_data) + auth_ok, response_message, status_code = await self.validate( + request_data, endpoint_data + ) device_id = request_data["request"]["get"].get("LrnDevEui") if device_id: # a LrnDevEui must be present to send the data to Kafka topic topic_name = endpoint_data["kafka_raw_data_topic"] else: topic_name = None - logging.info("Validation: {}, {}, {}".format(auth_ok, response_message, status_code)) + logging.info( + "Validation: {}, {}, {}".format(auth_ok, response_message, status_code) + ) return auth_ok, device_id, topic_name, response_message, status_code async def get_metadata(self, request_data: dict, device_id: str) -> str: diff --git a/endpoints/sentilo/cesva.py b/endpoints/sentilo/cesva.py index 3592db5..65077ee 100644 --- a/endpoints/sentilo/cesva.py +++ b/endpoints/sentilo/cesva.py @@ -17,14 +17,18 @@ async def validate( :param endpoint_data: endpoint data from device registry :return: (bool ok, str error text, int status code) """ - [status_ok, response_message, status_code] = await super().validate(request_data, endpoint_data) + [status_ok, response_message, status_code] = await super().validate( + request_data, endpoint_data + ) if status_ok is False: return False, response_message, status_code try: # check if device id can be extracted - json.loads(request_data["request"]["body"].decode("utf-8"))["sensors"][0]["sensor"][0:-2] + json.loads(request_data["request"]["body"].decode("utf-8"))["sensors"][0][ + "sensor" + ][0:-2] return True, "Request accepted", 202 except Exception: logging.warning("unable to retreive device_id from request body") @@ -35,11 +39,17 @@ async def process_request( request_data: dict, endpoint_data: dict, ) -> Tuple[bool, str, Union[str, None], Union[str, dict, list], int]: - auth_ok, response_message, status_code = await self.validate(request_data, endpoint_data) + auth_ok, response_message, status_code = await self.validate( + request_data, endpoint_data + ) - logging.info("Validation: {}, {}, {}".format(auth_ok, response_message, status_code)) + logging.info( + "Validation: {}, {}, {}".format(auth_ok, response_message, status_code) + ) if auth_ok: - device_id = json.loads(request_data["request"]["body"].decode("utf-8"))["sensors"][0]["sensor"][0:-2] + device_id = json.loads(request_data["request"]["body"].decode("utf-8"))[ + "sensors" + ][0]["sensor"][0:-2] topic_name = endpoint_data["kafka_raw_data_topic"] else: device_id = 
None diff --git a/pyproject.toml b/pyproject.toml index 899f71e..6e72ee7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,3 @@ -[tool.setuptools] -py-modules = [] - [build-system] requires = ["setuptools"] build-backend = "setuptools.build_meta" @@ -14,12 +11,10 @@ name = "mittaridatapumppu-endpoint" description = "" readme = "README.md" requires-python = ">=3.10" -dynamic = ["version"] - +version = "v0.1.0" dependencies = [ - "aiokafka", "fastapi", - "fvhiot@https://github.com/ForumViriumHelsinki/FVHIoT-python/archive/refs/tags/v0.3.2.zip", + "fvhiot[kafka]@https://github.com/ForumViriumHelsinki/FVHIoT-python/archive/refs/tags/v0.4.1.zip", "httpx", "kafka-python", "python-multipart", diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..2e02222 --- /dev/null +++ b/requirements-dev.txt @@ -0,0 +1,167 @@ +# +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: +# +# pip-compile --extra=dev --output-file=requirements-dev.txt --strip-extras +# +aiohttp==3.9.1 + # via black +aiokafka==0.10.0 + # via fvhiot +aiosignal==1.3.1 + # via aiohttp +annotated-types==0.6.0 + # via pydantic +anyio==3.7.1 + # via + # fastapi + # httpx + # starlette +async-timeout==4.0.3 + # via aiokafka +attrs==23.1.0 + # via aiohttp +autoflake==2.2.1 + # via mittaridatapumppu-endpoint (pyproject.toml) +autopep8==2.0.4 + # via mittaridatapumppu-endpoint (pyproject.toml) +black==23.12.0 + # via mittaridatapumppu-endpoint (pyproject.toml) +certifi==2023.11.17 + # via + # fvhiot + # httpcore + # httpx + # sentry-sdk +cfgv==3.4.0 + # via pre-commit +click==8.1.7 + # via + # black + # uvicorn +coverage==7.3.3 + # via + # coverage + # pytest-cov +distlib==0.3.8 + # via virtualenv +fastapi==0.105.0 + # via mittaridatapumppu-endpoint (pyproject.toml) +filelock==3.13.1 + # via virtualenv +flake8==6.1.0 + # via + # mittaridatapumppu-endpoint (pyproject.toml) + # pep8-naming +frozenlist==1.4.1 + # via + # aiohttp + # aiosignal +fvhiot @ https://github.com/ForumViriumHelsinki/FVHIoT-python/archive/refs/tags/v0.4.1.zip + # via mittaridatapumppu-endpoint (pyproject.toml) +gunicorn==21.2.0 + # via mittaridatapumppu-endpoint (pyproject.toml) +h11==0.14.0 + # via + # httpcore + # uvicorn +httpcore==1.0.2 + # via httpx +httpx==0.25.2 + # via mittaridatapumppu-endpoint (pyproject.toml) +identify==2.5.33 + # via pre-commit +idna==3.6 + # via + # anyio + # httpx + # yarl +iniconfig==2.0.0 + # via pytest +isort==5.13.2 + # via mittaridatapumppu-endpoint (pyproject.toml) +kafka-python==2.0.2 + # via mittaridatapumppu-endpoint (pyproject.toml) +mccabe==0.7.0 + # via flake8 +msgpack==1.0.7 + # via fvhiot +multidict==6.0.4 + # via + # aiohttp + # yarl +mypy-extensions==1.0.0 + # via black +nodeenv==1.8.0 + # via pre-commit +packaging==23.2 + # via + # aiokafka + # black + # gunicorn + # pytest +pathspec==0.12.1 + # via black +pep8-naming==0.13.3 + # via mittaridatapumppu-endpoint (pyproject.toml) +platformdirs==4.1.0 + # via + # black + # virtualenv +pluggy==1.3.0 + # via pytest +pre-commit==3.6.0 + # via mittaridatapumppu-endpoint (pyproject.toml) +pycodestyle==2.11.1 + # via + # autopep8 + # flake8 +pydantic==2.5.2 + # via + # fastapi + # mittaridatapumppu-endpoint (pyproject.toml) +pydantic-core==2.14.5 + # via pydantic +pyflakes==3.1.0 + # via + # autoflake + # flake8 +pytest==7.4.3 + # via + # mittaridatapumppu-endpoint (pyproject.toml) + # pytest-asyncio + # pytest-cov +pytest-asyncio==0.23.2 + # via mittaridatapumppu-endpoint (pyproject.toml) 
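Dependency note: aiokafka is no longer a direct dependency of this package; it now arrives through the fvhiot[kafka] extra ("via fvhiot" above), which is also why the Dockerfile change adds gcc, python3-dev, libc-dev and zlib-dev to build it on Alpine. For reference, a minimal sketch of the async producer pattern aiokafka provides; the broker address and topic name below are placeholders, not configuration from this repository:

```python
# Minimal aiokafka producer sketch; "localhost:9092" and "raw-data-topic" are
# placeholders and not values used by this service.
import asyncio

from aiokafka import AIOKafkaProducer


async def main() -> None:
    producer = AIOKafkaProducer(bootstrap_servers="localhost:9092")
    await producer.start()  # connect to the broker before sending anything
    try:
        # send_and_wait() returns once the broker has acknowledged the message
        await producer.send_and_wait("raw-data-topic", b"payload-bytes")
    finally:
        await producer.stop()  # flush pending messages and close the connection


if __name__ == "__main__":
    asyncio.run(main())
```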
+pytest-cov==4.1.0
+    # via mittaridatapumppu-endpoint (pyproject.toml)
+python-multipart==0.0.6
+    # via mittaridatapumppu-endpoint (pyproject.toml)
+pyyaml==6.0.1
+    # via pre-commit
+sentry-asgi==0.2.0
+    # via mittaridatapumppu-endpoint (pyproject.toml)
+sentry-sdk==1.39.1
+    # via sentry-asgi
+sniffio==1.3.0
+    # via
+    #   anyio
+    #   httpx
+starlette==0.27.0
+    # via fastapi
+typing-extensions==4.9.0
+    # via
+    #   fastapi
+    #   pydantic
+    #   pydantic-core
+urllib3==2.1.0
+    # via sentry-sdk
+uvicorn==0.24.0.post1
+    # via mittaridatapumppu-endpoint (pyproject.toml)
+virtualenv==20.25.0
+    # via pre-commit
+yarl==1.9.4
+    # via aiohttp
+
+# The following packages are considered to be unsafe in a requirements file:
+# setuptools
diff --git a/requirements-test.txt b/requirements-test.txt
new file mode 100644
index 0000000..475d56a
--- /dev/null
+++ b/requirements-test.txt
@@ -0,0 +1,88 @@
+#
+# This file is autogenerated by pip-compile with Python 3.11
+# by the following command:
+#
+#    pip-compile --extra=test --output-file=requirements-test.txt --strip-extras
+#
+aiokafka==0.10.0
+    # via fvhiot
+annotated-types==0.6.0
+    # via pydantic
+anyio==3.7.1
+    # via
+    #   fastapi
+    #   httpx
+    #   starlette
+async-timeout==4.0.3
+    # via aiokafka
+certifi==2023.11.17
+    # via
+    #   fvhiot
+    #   httpcore
+    #   httpx
+    #   requests
+    #   sentry-sdk
+charset-normalizer==3.3.2
+    # via requests
+click==8.1.7
+    # via uvicorn
+fastapi==0.105.0
+    # via mittaridatapumppu-endpoint (pyproject.toml)
+fvhiot @ https://github.com/ForumViriumHelsinki/FVHIoT-python/archive/refs/tags/v0.4.1.zip
+    # via mittaridatapumppu-endpoint (pyproject.toml)
+h11==0.14.0
+    # via
+    #   httpcore
+    #   uvicorn
+httpcore==1.0.2
+    # via httpx
+httpx==0.25.2
+    # via mittaridatapumppu-endpoint (pyproject.toml)
+idna==3.6
+    # via
+    #   anyio
+    #   httpx
+    #   requests
+iniconfig==2.0.0
+    # via pytest
+kafka-python==2.0.2
+    # via mittaridatapumppu-endpoint (pyproject.toml)
+msgpack==1.0.7
+    # via fvhiot
+packaging==23.2
+    # via
+    #   aiokafka
+    #   pytest
+pluggy==1.3.0
+    # via pytest
+pydantic==2.5.2
+    # via fastapi
+pydantic-core==2.14.5
+    # via pydantic
+pytest==7.4.3
+    # via mittaridatapumppu-endpoint (pyproject.toml)
+python-multipart==0.0.6
+    # via mittaridatapumppu-endpoint (pyproject.toml)
+requests==2.31.0
+    # via mittaridatapumppu-endpoint (pyproject.toml)
+sentry-asgi==0.2.0
+    # via mittaridatapumppu-endpoint (pyproject.toml)
+sentry-sdk==1.39.1
+    # via sentry-asgi
+sniffio==1.3.0
+    # via
+    #   anyio
+    #   httpx
+starlette==0.27.0
+    # via fastapi
+typing-extensions==4.9.0
+    # via
+    #   fastapi
+    #   pydantic
+    #   pydantic-core
+urllib3==2.1.0
+    # via
+    #   requests
+    #   sentry-sdk
+uvicorn==0.24.0.post1
+    # via mittaridatapumppu-endpoint (pyproject.toml)
diff --git a/requirements.txt b/requirements.txt
index ab861d9..476ca10 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,93 +2,71 @@
 # This file is autogenerated by pip-compile with Python 3.11
 # by the following command:
 #
-#    pip-compile pyproject.toml
+#    pip-compile --output-file=requirements.txt --strip-extras
 #
-aiokafka==0.8.1
-    # via mittaridatapumppu-endpoint (pyproject.toml)
-annotated-types==0.5.0
+aiokafka==0.10.0
+    # via fvhiot
+annotated-types==0.6.0
     # via pydantic
 anyio==3.7.1
     # via
     #   fastapi
-    #   httpcore
+    #   httpx
     #   starlette
 async-timeout==4.0.3
     # via aiokafka
-blinker==1.6.2
-    # via flask
-certifi==2023.7.22
+certifi==2023.11.17
     # via
     #   fvhiot
     #   httpcore
     #   httpx
     #   sentry-sdk
 click==8.1.7
-    # via
-    #   flask
-    #   uvicorn
-fastapi==0.103.1
+    # via uvicorn
+fastapi==0.105.0
     # via mittaridatapumppu-endpoint (pyproject.toml)
-flask==2.3.3
-    # via fvhiot
-fvhiot @ https://github.com/ForumViriumHelsinki/FVHIoT-python/archive/refs/tags/v0.3.2.zip
+fvhiot @ https://github.com/ForumViriumHelsinki/FVHIoT-python/archive/refs/tags/v0.4.1.zip
     # via mittaridatapumppu-endpoint (pyproject.toml)
 h11==0.14.0
     # via
     #   httpcore
     #   uvicorn
-httpcore==0.18.0
+httpcore==1.0.2
     # via httpx
-httpx==0.25.0
+httpx==0.25.2
     # via mittaridatapumppu-endpoint (pyproject.toml)
-idna==3.4
+idna==3.6
     # via
     #   anyio
     #   httpx
-itsdangerous==2.1.2
-    # via flask
-jinja2==3.1.2
-    # via flask
 kafka-python==2.0.2
-    # via
-    #   aiokafka
-    #   fvhiot
-    #   mittaridatapumppu-endpoint (pyproject.toml)
-markupsafe==2.1.3
-    # via
-    #   jinja2
-    #   werkzeug
-msgpack==1.0.6
+    # via mittaridatapumppu-endpoint (pyproject.toml)
+msgpack==1.0.7
     # via fvhiot
-packaging==23.1
+packaging==23.2
     # via aiokafka
-pydantic==2.4.1
+pydantic==2.5.2
     # via fastapi
-pydantic-core==2.10.1
+pydantic-core==2.14.5
     # via pydantic
 python-multipart==0.0.6
     # via mittaridatapumppu-endpoint (pyproject.toml)
 sentry-asgi==0.2.0
     # via mittaridatapumppu-endpoint (pyproject.toml)
-sentry-sdk==1.31.0
+sentry-sdk==1.39.1
     # via sentry-asgi
 sniffio==1.3.0
     # via
     #   anyio
-    #   httpcore
     #   httpx
 starlette==0.27.0
-    # via
-    #   fastapi
-    #   fvhiot
-typing-extensions==4.8.0
+    # via fastapi
+typing-extensions==4.9.0
     # via
     #   fastapi
     #   pydantic
     #   pydantic-core
-urllib3==2.0.5
+urllib3==2.1.0
     # via sentry-sdk
-uvicorn==0.23.2
+uvicorn==0.24.0.post1
     # via mittaridatapumppu-endpoint (pyproject.toml)
-werkzeug==2.3.7
-    # via flask
diff --git a/tests/test_api2.py b/tests/test_api2.py
index 62609f7..7954571 100644
--- a/tests/test_api2.py
+++ b/tests/test_api2.py
@@ -82,7 +82,9 @@ def test_digita_endpoint_up():
     url = f"{API_BASE_URL}/api/v1/digita"
     resp = httpx.get(url)
     assert resp.status_code == 401, "digita v1 is up"
-    assert resp.text.startswith("Missing or invalid authentication token"), "digita v1 is up"
+    assert resp.text.startswith(
+        "Missing or invalid authentication token"
+    ), "digita v1 is up"
 
 
 def test_digita_endpoint_authenticated_access():
@@ -91,7 +93,9 @@ def test_digita_endpoint_authenticated_access():
     params = THINGPARK_PARAMS.copy()
     payload = THINGPARK_PAYLOAD.copy()
     # Replace Time with ~current time
-    ts = (datetime.now(timezone.utc) - timedelta(seconds=1)).strftime("%Y-%m-%dT%H:%M:%S.%f%z")
+    ts = (datetime.now(timezone.utc) - timedelta(seconds=1)).strftime(
+        "%Y-%m-%dT%H:%M:%S.%f%z"
+    )
     payload["DevEUI_uplink"]["Time"] = ts
     resp = httpx.post(url, headers=headers, params=params, data=payload)
     logging.info(resp.text)
diff --git a/tests/test_api_cesva.py b/tests/test_api_cesva.py
index 62f27c1..e77c78f 100644
--- a/tests/test_api_cesva.py
+++ b/tests/test_api_cesva.py
@@ -15,10 +15,22 @@
 # Body
 PAYLOAD = {
     "sensors": [
-        {"sensor": "TA120-T246187-N", "observations": [{"value": "61.2", "timestamp": "24/02/2022T17:45:15UTC"}]},
-        {"sensor": "TA120-T246187-O", "observations": [{"value": "false", "timestamp": "24/02/2022T17:45:15UTC"}]},
-        {"sensor": "TA120-T246187-U", "observations": [{"value": "false", "timestamp": "24/02/2022T17:45:15UTC"}]},
-        {"sensor": "TA120-T246187-M", "observations": [{"value": "77", "timestamp": "24/02/2022T17:45:15UTC"}]},
+        {
+            "sensor": "TA120-T246187-N",
+            "observations": [{"value": "61.2", "timestamp": "24/02/2022T17:45:15UTC"}],
+        },
+        {
+            "sensor": "TA120-T246187-O",
+            "observations": [{"value": "false", "timestamp": "24/02/2022T17:45:15UTC"}],
+        },
+        {
+            "sensor": "TA120-T246187-U",
+            "observations": [{"value": "false", "timestamp": "24/02/2022T17:45:15UTC"}],
+        },
+        {
+            "sensor": "TA120-T246187-M",
+            "observations": [{"value": "77", "timestamp": "24/02/2022T17:45:15UTC"}],
+        },
         {
             "sensor": "TA120-T246187-S",
             "observations": [