diff --git a/.github/workflows/boilerplate.yml b/.github/workflows/boilerplate.yml index 862b33f87..8b2732fd3 100644 --- a/.github/workflows/boilerplate.yml +++ b/.github/workflows/boilerplate.yml @@ -28,7 +28,7 @@ jobs: fetch-depth: 0 - name: Get changed files id: changed-files - uses: tj-actions/changed-files@v31 + uses: tj-actions/changed-files@v41 with: files: | **/*.py diff --git a/.github/workflows/requirements.yml b/.github/workflows/requirements.yml index 4502b183f..2eb1b7932 100644 --- a/.github/workflows/requirements.yml +++ b/.github/workflows/requirements.yml @@ -39,7 +39,7 @@ jobs: run: python -m pip install pip-check-reqs - name: Check extra core requirements - run: pip-extra-reqs -r werkzeug -r python-multipart covalent covalent_dispatcher covalent_ui --ignore-requirement=qiskit --ignore-requirement=qiskit-ibm-provider --ignore-requirement=amazon-braket-pennylane-plugin + run: pip-extra-reqs -r python-multipart covalent covalent_dispatcher covalent_ui --ignore-requirement=qiskit --ignore-requirement=qiskit-ibm-provider --ignore-requirement=amazon-braket-pennylane-plugin - name: Check missing SDK requirements run: > diff --git a/CHANGELOG.md b/CHANGELOG.md index e3e6a1376..2f670f14b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,47 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [UNRELEASED] +## [0.235.1-rc.0] - 2024-06-10 + +### Authors + +- Santosh kumar <29346072+santoshkumarradha@users.noreply.github.com> +- Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> +- Co-authored-by: kessler-frost + + +### Fixed + +- Ignoring all errors when importing qelectrons instead of only `ImportError` + +## [0.235.0-rc.0] - 2024-05-29 + +### Authors + +- Ara Ghukasyan <38226926+araghukas@users.noreply.github.com> +- Casey Jao + + +### Changed + +- Updated Slurm plugin docs to note possible SSH limitation +- Updated Slurm plugin docs to remove `sshproxy` section +- API base endpoint is now configurable from an environment variable +- Removed unused lattice attributes to reduce asset uploads + +### Fixed + +- Improved handling of Covalent version mismatches between client and + executor environments + +### Removed + +- Removed obsolete `migrate-pickled-result-object` command + +### Operations + +- Allow installing a specific commit sha to ease testing + ## [0.234.1-rc.0] - 2024-05-10 ### Authors diff --git a/VERSION b/VERSION index 9d818f2d5..abee87d30 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.234.1-rc.0 \ No newline at end of file +0.235.1-rc.0 \ No newline at end of file diff --git a/covalent/__init__.py b/covalent/__init__.py index 0cfc7bbe7..d644a57d6 100644 --- a/covalent/__init__.py +++ b/covalent/__init__.py @@ -51,7 +51,8 @@ from ._workflow.electron import wait # nopycln: import from .executor.utils import get_context # nopycln: import -with contextlib.suppress(ImportError): +with contextlib.suppress(Exception): + # try to load qelectron modules from ._workflow.qelectron import qelectron # nopycln: import from .quantum import QCluster # nopycln: import diff --git a/covalent/_api/apiclient.py b/covalent/_api/apiclient.py index c4c2a5492..d3be6bd4a 100644 --- a/covalent/_api/apiclient.py +++ b/covalent/_api/apiclient.py @@ -33,7 +33,7 @@ def __init__(self, dispatcher_addr: str, adapter: HTTPAdapter = None, auto_raise self.adapter = adapter self.auto_raise = auto_raise - def prepare_headers(self, **kwargs): + def prepare_headers(self, kwargs): extra_headers = 
CovalentAPIClient.get_extra_headers() headers = kwargs.get("headers", {}) if headers: @@ -42,7 +42,7 @@ def prepare_headers(self, **kwargs): return headers def get(self, endpoint: str, **kwargs): - headers = self.prepare_headers(**kwargs) + headers = self.prepare_headers(kwargs) url = self.dispatcher_addr + endpoint try: with requests.Session() as session: @@ -62,7 +62,7 @@ def get(self, endpoint: str, **kwargs): return r def put(self, endpoint: str, **kwargs): - headers = self.prepare_headers() + headers = self.prepare_headers(kwargs) url = self.dispatcher_addr + endpoint try: with requests.Session() as session: @@ -81,7 +81,7 @@ def put(self, endpoint: str, **kwargs): return r def post(self, endpoint: str, **kwargs): - headers = self.prepare_headers() + headers = self.prepare_headers(kwargs) url = self.dispatcher_addr + endpoint try: with requests.Session() as session: @@ -100,7 +100,7 @@ def post(self, endpoint: str, **kwargs): return r def delete(self, endpoint: str, **kwargs): - headers = self.prepare_headers() + headers = self.prepare_headers(kwargs) url = self.dispatcher_addr + endpoint try: with requests.Session() as session: diff --git a/covalent/_dispatcher_plugins/local.py b/covalent/_dispatcher_plugins/local.py index 8760cec96..9857342cf 100644 --- a/covalent/_dispatcher_plugins/local.py +++ b/covalent/_dispatcher_plugins/local.py @@ -14,6 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. +import os import tempfile from copy import deepcopy from functools import wraps @@ -47,6 +48,9 @@ dispatch_cache_dir.mkdir(parents=True, exist_ok=True) +BASE_ENDPOINT = os.getenv("COVALENT_DISPATCH_BASE_ENDPOINT", "/api/v2/dispatches") + + def get_redispatch_request_body_v2( dispatch_id: str, staging_dir: str, @@ -540,10 +544,10 @@ def register_manifest( dispatcher_addr = format_server_url() stripped = strip_local_uris(manifest) if push_assets else manifest - endpoint = "/api/v2/dispatches" + endpoint = BASE_ENDPOINT if parent_dispatch_id: - endpoint = f"{endpoint}/{parent_dispatch_id}/subdispatches" + endpoint = f"{BASE_ENDPOINT}/{parent_dispatch_id}/sublattices" r = APIClient(dispatcher_addr).post(endpoint, data=stripped.model_dump_json()) r.raise_for_status() @@ -596,7 +600,7 @@ def _upload(assets: List[AssetSchema]): number_uploaded = 0 for i, asset in enumerate(assets): if not asset.remote_uri or not asset.uri: - app_log.debug(f"Skipping asset {i+1} out of {total}") + app_log.debug(f"Skipping asset {i + 1} out of {total}") continue if asset.remote_uri.startswith(local_scheme_prefix): copy_file_locally(asset.uri, asset.remote_uri) @@ -604,7 +608,7 @@ def _upload(assets: List[AssetSchema]): else: _upload_asset(asset.uri, asset.remote_uri) number_uploaded += 1 - app_log.debug(f"Uploaded asset {i+1} out of {total}.") + app_log.debug(f"Uploaded asset {i + 1} out of {total}.") app_log.debug(f"uploaded {number_uploaded} assets.") @@ -615,6 +619,7 @@ def _upload_asset(local_uri, remote_uri): else: local_path = local_uri + filesize = os.path.getsize(local_path) with open(local_path, "rb") as reader: app_log.debug(f"uploading to {remote_uri}") f = furl(remote_uri) @@ -624,6 +629,11 @@ def _upload_asset(local_uri, remote_uri): dispatcher_addr = f"{scheme}://{host}:{port}" endpoint = str(f.path) api_client = APIClient(dispatcher_addr) + if f.query: + endpoint = f"{endpoint}?{f.query}" + + # Workaround for Requests bug when streaming from empty files + data = reader.read() if filesize < 50 else reader - r = api_client.put(endpoint, 
data=reader) + r = api_client.put(endpoint, headers={"Content-Length": str(filesize)}, data=data) r.raise_for_status() diff --git a/covalent/_results_manager/result.py b/covalent/_results_manager/result.py index a42f514a6..8a6e3520b 100644 --- a/covalent/_results_manager/result.py +++ b/covalent/_results_manager/result.py @@ -18,7 +18,7 @@ import os import re from datetime import datetime -from typing import TYPE_CHECKING, Any, Dict, List, Set, Union +from typing import TYPE_CHECKING, Any, Dict, List, Union from .._shared_files import logger from .._shared_files.config import get_config @@ -516,34 +516,3 @@ def _convert_to_electron_result(self) -> Any: """ return self._result - - -def _filter_cova_decorators(function_string: str, cova_imports: Set[str]) -> str: - """ - Given a string representing a function, comment out any Covalent-related decorators. - - Args - function_string: A string representation of a workflow function. - - Returns: - The function string with Covalent-related decorators commented out. - """ - - has_cova_decorator = False - in_decorator = 0 - function_lines = function_string.split("\n") - for i in range(len(function_lines)): - line = function_lines[i].strip() - if in_decorator > 0: - function_lines[i] = f"# {function_lines[i]}" - in_decorator += line.count("(") - in_decorator -= line.count(")") - elif line.startswith("@"): - decorator_name = line.split("@")[1].split(".")[0].split("(")[0] - if decorator_name in cova_imports: - function_lines[i] = f"# {function_lines[i]}" - has_cova_decorator = True - in_decorator += line.count("(") - in_decorator -= line.count(")") - - return "\n".join(function_lines) if has_cova_decorator else function_string diff --git a/covalent/_serialize/electron.py b/covalent/_serialize/electron.py index fe5763675..b90879fbf 100644 --- a/covalent/_serialize/electron.py +++ b/covalent/_serialize/electron.py @@ -210,8 +210,8 @@ def _get_node_custom_assets(node_attrs: dict) -> Dict[str, AssetSchema]: def serialize_node(node_id: int, node_attrs: dict, node_storage_path) -> ElectronSchema: meta = _serialize_node_metadata(node_attrs, node_storage_path) assets = _serialize_node_assets(node_attrs, node_storage_path) - custom_assets = _get_node_custom_assets(node_attrs) - return ElectronSchema(id=node_id, metadata=meta, assets=assets, custom_assets=custom_assets) + assets._custom = _get_node_custom_assets(node_attrs) + return ElectronSchema(id=node_id, metadata=meta, assets=assets) def deserialize_node(e: ElectronSchema, metadata_only: bool = False) -> dict: diff --git a/covalent/_serialize/lattice.py b/covalent/_serialize/lattice.py index 3ab39f2bc..3d61fcfc1 100644 --- a/covalent/_serialize/lattice.py +++ b/covalent/_serialize/lattice.py @@ -40,10 +40,6 @@ "workflow_function_string": AssetType.TEXT, "doc": AssetType.TEXT, "inputs": AssetType.TRANSPORTABLE, - "named_args": AssetType.TRANSPORTABLE, - "named_kwargs": AssetType.TRANSPORTABLE, - "cova_imports": AssetType.JSONABLE, - "lattice_imports": AssetType.TEXT, "hooks": AssetType.JSONABLE, } @@ -112,33 +108,6 @@ def _serialize_lattice_assets(lat, storage_path: str) -> LatticeAssets: lat.inputs, ASSET_TYPES["inputs"], storage_path, ASSET_FILENAME_MAP["inputs"] ) - # Deprecate - named_args_asset = save_asset( - lat.named_args, - ASSET_TYPES["named_args"], - storage_path, - ASSET_FILENAME_MAP["named_args"], - ) - named_kwargs_asset = save_asset( - lat.named_kwargs, - ASSET_TYPES["named_kwargs"], - storage_path, - ASSET_FILENAME_MAP["named_kwargs"], - ) - cova_imports_asset = save_asset( - 
lat.cova_imports, - ASSET_TYPES["cova_imports"], - storage_path, - ASSET_FILENAME_MAP["cova_imports"], - ) - lattice_imports_asset = save_asset( - lat.lattice_imports, - ASSET_TYPES["lattice_imports"], - storage_path, - ASSET_FILENAME_MAP["lattice_imports"], - ) - - # NOTE: these are actually JSONable hooks_asset = save_asset( lat.metadata["hooks"], ASSET_TYPES["hooks"], @@ -151,10 +120,6 @@ def _serialize_lattice_assets(lat, storage_path: str) -> LatticeAssets: workflow_function_string=workflow_func_str_asset, doc=docstring_asset, inputs=inputs_asset, - named_args=named_args_asset, - named_kwargs=named_kwargs_asset, - cova_imports=cova_imports_asset, - lattice_imports=lattice_imports_asset, hooks=hooks_asset, ) @@ -166,20 +131,12 @@ def _deserialize_lattice_assets(assets: LatticeAssets) -> dict: ) doc = load_asset(assets.doc, ASSET_TYPES["doc"]) inputs = load_asset(assets.inputs, ASSET_TYPES["inputs"]) - named_args = load_asset(assets.named_args, ASSET_TYPES["named_args"]) - named_kwargs = load_asset(assets.named_kwargs, ASSET_TYPES["named_kwargs"]) - cova_imports = load_asset(assets.cova_imports, ASSET_TYPES["cova_imports"]) - lattice_imports = load_asset(assets.lattice_imports, ASSET_TYPES["lattice_imports"]) hooks = load_asset(assets.hooks, ASSET_TYPES["hooks"]) return { "workflow_function": workflow_function, "workflow_function_string": workflow_function_string, "__doc__": doc, "inputs": inputs, - "named_args": named_args, - "named_kwargs": named_kwargs, - "cova_imports": cova_imports, - "lattice_imports": lattice_imports, "metadata": { "hooks": hooks, }, @@ -194,12 +151,10 @@ def _get_lattice_custom_assets(lat: Lattice) -> Dict[str, AssetSchema]: def serialize_lattice(lat, storage_path: str) -> LatticeSchema: meta = _serialize_lattice_metadata(lat) assets = _serialize_lattice_assets(lat, storage_path) - custom_assets = _get_lattice_custom_assets(lat) + assets._custom = _get_lattice_custom_assets(lat) tg = serialize_transport_graph(lat.transport_graph, storage_path) - return LatticeSchema( - metadata=meta, assets=assets, custom_assets=custom_assets, transport_graph=tg - ) + return LatticeSchema(metadata=meta, assets=assets, transport_graph=tg) def deserialize_lattice(model: LatticeSchema) -> Lattice: diff --git a/covalent/_shared_files/schemas/electron.py b/covalent/_shared_files/schemas/electron.py index b245cc93d..c5da65e1d 100644 --- a/covalent/_shared_files/schemas/electron.py +++ b/covalent/_shared_files/schemas/electron.py @@ -19,7 +19,7 @@ from datetime import datetime from typing import Dict, Optional -from pydantic import BaseModel, field_validator +from pydantic import BaseModel from .asset import AssetSchema from .common import StatusEnum @@ -91,6 +91,8 @@ class ElectronAssets(BaseModel): # user dependent assets hooks: AssetSchema + _custom: Optional[Dict[str, AssetSchema]] = None + class ElectronMetadata(BaseModel): task_group_id: int @@ -103,6 +105,8 @@ class ElectronMetadata(BaseModel): start_time: Optional[datetime] = None end_time: Optional[datetime] = None + _custom: Optional[Dict] = None + # For use by redispatch def reset(self): self.status = StatusEnum.NEW_OBJECT @@ -114,12 +118,3 @@ class ElectronSchema(BaseModel): id: int metadata: ElectronMetadata assets: ElectronAssets - custom_assets: Optional[Dict[str, AssetSchema]] = None - - @field_validator("custom_assets") - def check_custom_asset_keys(cls, v): - if v is not None: - for key in v: - if key in ASSET_FILENAME_MAP: - raise ValueError(f"Asset {key} conflicts with built-in key") - return v diff --git 
a/covalent/_shared_files/schemas/lattice.py b/covalent/_shared_files/schemas/lattice.py index 6a3e2bbf9..783b966ee 100644 --- a/covalent/_shared_files/schemas/lattice.py +++ b/covalent/_shared_files/schemas/lattice.py @@ -18,7 +18,7 @@ from typing import Dict, Optional -from pydantic import BaseModel, field_validator +from pydantic import BaseModel from .asset import AssetSchema from .transport_graph import TransportGraphSchema @@ -39,10 +39,6 @@ "workflow_function_string", "__doc__", "inputs", - "named_args", - "named_kwargs", - "cova_imports", - "lattice_imports", # user dependent assets "hooks", } @@ -83,14 +79,18 @@ class LatticeAssets(BaseModel): workflow_function_string: AssetSchema doc: AssetSchema # __doc__ inputs: AssetSchema - named_args: AssetSchema - named_kwargs: AssetSchema - cova_imports: AssetSchema - lattice_imports: AssetSchema + + # Deprecated + named_args: AssetSchema = AssetSchema(size=0) + named_kwargs: AssetSchema = AssetSchema(size=0) + cova_imports: AssetSchema = AssetSchema(size=0) + lattice_imports: AssetSchema = AssetSchema(size=0) # lattice.metadata hooks: AssetSchema + _custom: Optional[Dict[str, AssetSchema]] = None + class LatticeMetadata(BaseModel): name: str # __name__ @@ -101,18 +101,11 @@ class LatticeMetadata(BaseModel): python_version: Optional[str] = None covalent_version: Optional[str] = None + _custom: Optional[Dict] = None + class LatticeSchema(BaseModel): metadata: LatticeMetadata assets: LatticeAssets - custom_assets: Optional[Dict[str, AssetSchema]] = None transport_graph: TransportGraphSchema - - @field_validator("custom_assets") - def check_custom_asset_keys(cls, v): - if v is not None: - for key in v: - if key in ASSET_FILENAME_MAP: - raise ValueError(f"Asset {key} conflicts with built-in key") - return v diff --git a/covalent/_shared_files/schemas/result.py b/covalent/_shared_files/schemas/result.py index fa771bf9b..3160c3708 100644 --- a/covalent/_shared_files/schemas/result.py +++ b/covalent/_shared_files/schemas/result.py @@ -17,7 +17,7 @@ """FastAPI models for /api/v1/resultv2 endpoints""" from datetime import datetime -from typing import Optional +from typing import Dict, Optional from pydantic import BaseModel @@ -54,6 +54,8 @@ class ResultMetadata(BaseModel): start_time: Optional[datetime] = None end_time: Optional[datetime] = None + _custom: Optional[Dict] = None + # For use by redispatch def reset(self): self.dispatch_id = "" @@ -67,6 +69,8 @@ class ResultAssets(BaseModel): result: AssetSchema error: AssetSchema + _custom: Optional[Dict[str, AssetSchema]] = None + class ResultSchema(BaseModel): metadata: ResultMetadata diff --git a/covalent/_workflow/electron.py b/covalent/_workflow/electron.py index 0e80f0a22..e6a6e4648 100644 --- a/covalent/_workflow/electron.py +++ b/covalent/_workflow/electron.py @@ -429,6 +429,11 @@ def __call__(self, *args, **kwargs) -> Union[Any, "Electron"]: active_lattice.replace_electrons[name] = replacement_electron return bound_electron + # Avoid direct attribute access since that might trigger + # Electron.__getattr__ when executors build sublattices + # constructed with older versions of Covalent + function_string = self.__dict__.get("_function_string") + # Handle sublattices by injecting _build_sublattice_graph node if isinstance(self.function, Lattice): parent_metadata = active_lattice.metadata.copy() @@ -443,7 +448,6 @@ def __call__(self, *args, **kwargs) -> Union[Any, "Electron"]: ) name = sublattice_prefix + self.function.__name__ - function_string = self._function_string bound_electron = 
sub_electron( self.function, json.dumps(parent_metadata), *args, **kwargs ) @@ -464,7 +468,7 @@ def __call__(self, *args, **kwargs) -> Union[Any, "Electron"]: name=self.function.__name__, function=self.function, metadata=self.metadata.copy(), - function_string=self._function_string, + function_string=function_string, task_group_id=self.task_group_id if self.packing_tasks else None, ) self.task_group_id = self.task_group_id if self.packing_tasks else self.node_id @@ -847,16 +851,6 @@ def wait(child, parents): return child -@electron -def to_decoded_electron_collection(**x): - """Interchanges order of serialize -> collection""" - collection = list(x.values())[0] - if isinstance(collection, list): - return TransportableObject.deserialize_list(collection) - elif isinstance(collection, dict): - return TransportableObject.deserialize_dict(collection) - - # Copied from runner.py def _build_sublattice_graph(sub: Lattice, json_parent_metadata: str, *args, **kwargs): import os @@ -868,6 +862,8 @@ def _build_sublattice_graph(sub: Lattice, json_parent_metadata: str, *args, **kw sub.build_graph(*args, **kwargs) + DISABLE_LEGACY_SUBLATTICES = os.environ.get("COVALENT_DISABLE_LEGACY_SUBLATTICES") == "1" + try: # Attempt multistage sublattice dispatch. For now we require # the executor to reach the Covalent server @@ -891,5 +887,7 @@ def _build_sublattice_graph(sub: Lattice, json_parent_metadata: str, *args, **kw except Exception as ex: # Fall back to legacy sublattice handling + if DISABLE_LEGACY_SUBLATTICES: + raise print("Falling back to legacy sublattice handling") return sub.serialize_to_json() diff --git a/covalent/_workflow/lattice.py b/covalent/_workflow/lattice.py index 84f74f6b1..146b837d9 100644 --- a/covalent/_workflow/lattice.py +++ b/covalent/_workflow/lattice.py @@ -47,7 +47,7 @@ from ..executor import BaseExecutor from ..triggers import BaseTrigger -from .._shared_files.utils import get_imports, get_serialized_function_str +from .._shared_files.utils import get_serialized_function_str consumable_constraints = [] @@ -81,10 +81,7 @@ def __init__( self.__doc__ = self.workflow_function.__doc__ self.post_processing = False self.inputs = None - self.named_args = None - self.named_kwargs = None self.electron_outputs = {} - self.lattice_imports, self.cova_imports = get_imports(self.workflow_function) self.workflow_function = TransportableObject.make_transportable(self.workflow_function) @@ -105,8 +102,6 @@ def serialize_to_json(self) -> str: attributes["transport_graph"] = self.transport_graph.serialize_to_json() attributes["inputs"] = self.inputs.to_dict() - attributes["named_args"] = self.named_args.to_dict() - attributes["named_kwargs"] = self.named_kwargs.to_dict() attributes["electron_outputs"] = {} for node_name, output in self.electron_outputs.items(): @@ -121,8 +116,6 @@ def deserialize_from_json(json_data: str) -> None: for node_name, object_dict in attributes["electron_outputs"].items(): attributes["electron_outputs"][node_name] = TransportableObject.from_dict(object_dict) - attributes["named_kwargs"] = TransportableObject.from_dict(attributes["named_kwargs"]) - attributes["named_args"] = TransportableObject.from_dict(attributes["named_args"]) attributes["inputs"] = TransportableObject.from_dict(attributes["inputs"]) if attributes["transport_graph"]: @@ -209,9 +202,6 @@ def build_graph(self, *args, **kwargs) -> None: new_kwargs = dict(named_kwargs.items()) self.inputs = TransportableObject({"args": args, "kwargs": kwargs}) - self.named_args = TransportableObject(named_args) - 
self.named_kwargs = TransportableObject(named_kwargs) - self.lattice_imports, self.cova_imports = get_imports(workflow_function) # Set any lattice metadata not explicitly set by the user constraint_names = {"executor", "workflow_executor", "hooks"} diff --git a/covalent/triggers/database_trigger.py b/covalent/triggers/database_trigger.py index d2a53402e..6baa0141c 100644 --- a/covalent/triggers/database_trigger.py +++ b/covalent/triggers/database_trigger.py @@ -19,9 +19,6 @@ from threading import Event from typing import List -from sqlalchemy import create_engine -from sqlalchemy.orm import Session - from covalent._shared_files import logger from .base import BaseTrigger @@ -88,6 +85,12 @@ def observe(self) -> None: where conditions are met or until stop has being called """ + # Since these modules are only used server-side, delay their + # imports to avoid introducing a sqlalchemy requirement to + # SDK-only installs + from sqlalchemy import create_engine + from sqlalchemy.orm import Session + app_log.debug("Inside DatabaseTrigger's observe") event_count = 0 diff --git a/covalent_dispatcher/_cli/cli.py b/covalent_dispatcher/_cli/cli.py index f24f24aaf..f352305e6 100644 --- a/covalent_dispatcher/_cli/cli.py +++ b/covalent_dispatcher/_cli/cli.py @@ -25,18 +25,7 @@ from rich.console import Console from .groups import db, deploy -from .service import ( - cluster, - config, - logs, - migrate_legacy_result_object, - print_header, - purge, - restart, - start, - status, - stop, -) +from .service import cluster, config, logs, print_header, purge, restart, start, status, stop # Main entrypoint @@ -73,7 +62,6 @@ def cli(ctx: click.Context, version: bool) -> None: cli.add_command(cluster) cli.add_command(db) cli.add_command(config) -cli.add_command(migrate_legacy_result_object) cli.add_command(deploy) if __name__ == "__main__": diff --git a/covalent_dispatcher/_cli/migrate.py b/covalent_dispatcher/_cli/migrate.py deleted file mode 100644 index 032aafbf0..000000000 --- a/covalent_dispatcher/_cli/migrate.py +++ /dev/null @@ -1,208 +0,0 @@ -# Copyright 2021 Agnostiq Inc. -# -# This file is part of Covalent. -# -# Licensed under the Apache License 2.0 (the "License"). A copy of the -# License may be obtained with this software package or at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Use of this file is prohibited except in compliance with the License. -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Utils for migrating legacy (0.110-era) result object to a modern result object.""" - -import pickle - -from covalent._results_manager import Result -from covalent._shared_files import logger -from covalent._shared_files.defaults import ( - attr_prefix, - electron_dict_prefix, - electron_list_prefix, - generator_prefix, - parameter_prefix, - subscript_prefix, -) -from covalent._shared_files.utils import get_named_params -from covalent._workflow.electron import to_decoded_electron_collection -from covalent._workflow.lattice import Lattice -from covalent._workflow.transport import TransportableObject, _TransportGraph, encode_metadata - -from .._db import update - -app_log = logger.app_log -log_stack_info = logger.log_stack_info - - -def process_node(node: dict) -> dict: - """Convert a node from a 0.110.2-vintage transport graph - - Args: - node: dictionary of node attributes - - Returns: - the converted node attributes - """ - - if "metadata" in node: - node["metadata"] = encode_metadata(node["metadata"]) - if "deps" not in node["metadata"]: - node["metadata"]["deps"] = {} - if "call_before" not in node["metadata"]: - node["metadata"]["call_before"] = [] - if "call_after" not in node["metadata"]: - node["metadata"]["call_after"] = [] - - node_name = node["name"] - - # encode output, remove "attribute_name", strip "attr_prefix" from name - if node_name.startswith(attr_prefix): - node["output"] = TransportableObject.make_transportable(node["output"]) - if "attribute_name" in node: - del node["attribute_name"] - new_node_name = node_name.replace(attr_prefix, "") - node["name"] = new_node_name - - # encode output, remove "key", strip "generator_prefix" from name - elif node_name.startswith(generator_prefix): - node["output"] = TransportableObject.make_transportable(node["output"]) - if "key" in node: - del node["key"] - new_node_name = node_name.replace(generator_prefix, "") - node["name"] = new_node_name - - # encode output, remove "key", strip "subscript_prefix" from name - elif node_name.startswith(subscript_prefix): - node["output"] = TransportableObject.make_transportable(node["output"]) - if "key" in node: - del node["key"] - new_node_name = node_name.replace(subscript_prefix, "") - node["name"] = new_node_name - - # Replace function for collection nodes - elif node_name.startswith(electron_list_prefix) or node_name.startswith(electron_dict_prefix): - node["function"] = TransportableObject(to_decoded_electron_collection) - - # Encode "value" and "output" for parameter nodes - elif node_name.startswith(parameter_prefix): - node["value"] = TransportableObject.make_transportable(node["value"]) - node["output"] = TransportableObject.make_transportable(node["output"]) - - # Function nodes: encode output and sublattice_result - else: - node["output"] = TransportableObject.make_transportable(node["output"]) - if "sublattice_result" in node: - if node["sublattice_result"] is not None: - node["sublattice_result"] = process_result_object(node["sublattice_result"]) - - return node - - -def process_transport_graph(tg: _TransportGraph) -> _TransportGraph: - """Convert a 0.110.2-vintage transport graph to a modern transport graph - - Args: - tg: old Transport Graph - - Returns: - the modernized Transport Graph - """ - tg_new = _TransportGraph() - g = tg.get_internal_graph_copy() - for node_id in g.nodes: - app_log.debug(f"Processing node {node_id}") - process_node(g.nodes[node_id]) - - if tg.lattice_metadata: - tg.lattice_metadata = encode_metadata(tg.lattice_metadata) - - tg_new._graph = g - 
return tg_new - - -def process_lattice(lattice: Lattice) -> Lattice: - """Convert a "legacy" (0.110.2) Lattice to a modern Lattice - - Args: - lattice: old lattice - - Returns: - the modernized lattice - """ - - workflow_function = lattice.workflow_function - lattice.workflow_function = TransportableObject.make_transportable(workflow_function) - inputs = {"args": lattice.args, "kwargs": lattice.kwargs} - lattice.inputs = TransportableObject(inputs) - - workflow_function = lattice.workflow_function.get_deserialized() - - named_args, named_kwargs = get_named_params(workflow_function, lattice.args, lattice.kwargs) - lattice.named_args = TransportableObject(named_args) - lattice.named_kwargs = TransportableObject(named_kwargs) - - metadata = lattice.metadata - - if "workflow_executor" not in metadata: - metadata["workflow_executor"] = "local" - - metadata = encode_metadata(metadata) - lattice.metadata = metadata - lattice.metadata["deps"] = {} - lattice.metadata["call_before"] = [] - lattice.metadata["call_after"] = [] - - lattice.transport_graph = process_transport_graph(lattice.transport_graph) - lattice.transport_graph.lattice_metadata = lattice.metadata - app_log.debug("Processed transport graph") - - # Delete raw inputs - del lattice.__dict__["args"] - del lattice.__dict__["kwargs"] - - return lattice - - -def process_result_object(result_object: Result) -> Result: - """Convert a "legacy" (0.110.2) Result object to a modern Result object - - Args: - result_object: the old Result object - - Returns: - the modernized result object - """ - - app_log.debug(f"Processing result object for dispatch {result_object.dispatch_id}") - process_lattice(result_object._lattice) - app_log.debug("Processed lattice") - - result_object._result = TransportableObject.make_transportable(result_object._result) - tg = result_object.lattice.transport_graph - for n in tg._graph.nodes: - tg.dirty_nodes.append(n) - - del result_object.__dict__["_inputs"] - return result_object - - -def migrate_pickled_result_object(path: str) -> None: - """Save legacy (0.110.2) result pickle file to a DataStore. - - This first transforms certain legacy properties of the result - object and then persists the result object to the datastore. 
- - Args: - path: path of the `result.pkl` file - """ - - with open(path, "rb") as f: - result_object = pickle.load(f) - - process_result_object(result_object) - update.persist(result_object) diff --git a/covalent_dispatcher/_cli/service.py b/covalent_dispatcher/_cli/service.py index df2299a8f..73d63da0b 100644 --- a/covalent_dispatcher/_cli/service.py +++ b/covalent_dispatcher/_cli/service.py @@ -56,7 +56,6 @@ from covalent._shared_files.config import ConfigManager, get_config, reload_config, set_config from .._db.datastore import DataStore -from .migrate import migrate_pickled_result_object UI_PIDFILE = get_config("dispatcher.cache_dir") + "/ui.pid" UI_LOGFILE = get_config("user_interface.log_dir") + "/covalent_ui.log" @@ -787,17 +786,6 @@ def logs() -> None: ) -@click.command() -@click.argument("result_pickle_path") -def migrate_legacy_result_object(result_pickle_path) -> None: - """Migrate a legacy result object - - Example: `covalent migrate-legacy-result-object result.pkl` - """ - - migrate_pickled_result_object(result_pickle_path) - - # Cluster CLI handlers (client side wrappers for the async handlers exposed # in the dask cluster process) async def _get_cluster_status(uri: str): diff --git a/covalent_dispatcher/_dal/db_interfaces/lattice_utils.py b/covalent_dispatcher/_dal/db_interfaces/lattice_utils.py index 676d0b68c..9871caefb 100644 --- a/covalent_dispatcher/_dal/db_interfaces/lattice_utils.py +++ b/covalent_dispatcher/_dal/db_interfaces/lattice_utils.py @@ -28,10 +28,6 @@ "name", "doc", "inputs", - "named_args", - "named_kwargs", - "cova_imports", - "lattice_imports", } METADATA_KEYS = lattice.LATTICE_METADATA_KEYS.copy() @@ -68,10 +64,6 @@ "workflow_function_string": "function_string_filename", "doc": "docstring_filename", "inputs": "inputs_filename", - "named_args": "named_args_filename", - "named_kwargs": "named_kwargs_filename", - "cova_imports": "cova_imports_filename", - "lattice_imports": "lattice_imports_filename", "executor_data": "executor_data_filename", "workflow_executor_data": "workflow_executor_data_filename", "hooks": "hooks_filename", diff --git a/covalent_dispatcher/_dal/importers/electron.py b/covalent_dispatcher/_dal/importers/electron.py index d4b5047c5..1f3ca51fc 100644 --- a/covalent_dispatcher/_dal/importers/electron.py +++ b/covalent_dispatcher/_dal/importers/electron.py @@ -133,6 +133,10 @@ def import_electron_assets( asset_recs = {} for asset_key, asset in e.assets: + # Register these later + if asset_key == "_custom": + continue + node_storage_path, object_key = object_store.get_uri_components( dispatch_id, e.id, @@ -157,8 +161,8 @@ def import_electron_assets( asset.remote_uri = f"file://{local_uri}" # Register custom assets - if e.custom_assets: - for asset_key, asset in e.custom_assets.items(): + if e.assets._custom: + for asset_key, asset in e.assets._custom.items(): object_key = f"{asset_key}.data" local_uri = os.path.join(node_storage_path, object_key) diff --git a/covalent_dispatcher/_dal/importers/lattice.py b/covalent_dispatcher/_dal/importers/lattice.py index a14938f98..55fa50925 100644 --- a/covalent_dispatcher/_dal/importers/lattice.py +++ b/covalent_dispatcher/_dal/importers/lattice.py @@ -24,16 +24,12 @@ from covalent._shared_files.config import get_config from covalent._shared_files.schemas.lattice import ( - LATTICE_COVA_IMPORTS_FILENAME, LATTICE_DOCSTRING_FILENAME, LATTICE_ERROR_FILENAME, LATTICE_FUNCTION_FILENAME, LATTICE_FUNCTION_STRING_FILENAME, LATTICE_HOOKS_FILENAME, LATTICE_INPUTS_FILENAME, - LATTICE_LATTICE_IMPORTS_FILENAME, - 
LATTICE_NAMED_ARGS_FILENAME, - LATTICE_NAMED_KWARGS_FILENAME, LATTICE_RESULTS_FILENAME, LATTICE_STORAGE_TYPE, LatticeAssets, @@ -71,12 +67,8 @@ def _get_lattice_meta(lat: LatticeSchema, storage_path) -> dict: "function_string_filename": LATTICE_FUNCTION_STRING_FILENAME, "error_filename": LATTICE_ERROR_FILENAME, "inputs_filename": LATTICE_INPUTS_FILENAME, - "named_args_filename": LATTICE_NAMED_ARGS_FILENAME, - "named_kwargs_filename": LATTICE_NAMED_KWARGS_FILENAME, "results_filename": LATTICE_RESULTS_FILENAME, "hooks_filename": LATTICE_HOOKS_FILENAME, - "cova_imports_filename": LATTICE_COVA_IMPORTS_FILENAME, - "lattice_imports_filename": LATTICE_LATTICE_IMPORTS_FILENAME, } kwargs.update(legacy_kwargs) return kwargs @@ -94,6 +86,10 @@ def import_lattice_assets( # Register built-in assets for asset_key, asset in lat.assets: + # Deal with these later + if asset_key == "_custom": + continue + storage_path, object_key = object_store.get_uri_components( dispatch_id=dispatch_id, node_id=None, @@ -118,8 +114,8 @@ def import_lattice_assets( asset.remote_uri = f"file://{local_uri}" # Register custom assets - if lat.custom_assets: - for asset_key, asset in lat.custom_assets.items(): + if lat.assets._custom: + for asset_key, asset in lat.assets._custom.items(): object_key = f"{asset_key}.data" local_uri = os.path.join(storage_path, object_key) diff --git a/covalent_dispatcher/_db/dispatchdb.py b/covalent_dispatcher/_db/dispatchdb.py index 621022777..e78a02d6d 100644 --- a/covalent_dispatcher/_db/dispatchdb.py +++ b/covalent_dispatcher/_db/dispatchdb.py @@ -20,7 +20,6 @@ from datetime import datetime import networkx as nx -import simplejson import covalent.executor as covalent_executor from covalent._shared_files import logger @@ -125,38 +124,6 @@ def result_encoder(obj): return str(obj) -def encode_result(result_obj): - lattice = result_obj.lattice - - result_string = result_obj.encoded_result.json - if not result_string: - result_string = result_obj.encoded_result.object_string - - named_args = {k: v.object_string for k, v in lattice.named_args.items()} - named_kwargs = {k: v.object_string for k, v in lattice.named_kwargs.items()} - result_dict = { - "dispatch_id": result_obj.dispatch_id, - "status": result_obj.status, - "result": result_string, - "start_time": result_obj.start_time, - "end_time": result_obj.end_time, - "results_dir": result_obj.results_dir, - "error": result_obj.error, - "lattice": { - "function_string": lattice.workflow_function_string, - "doc": lattice.__doc__, - "name": lattice.__name__, - "inputs": encode_dict({**named_args, **named_kwargs}), - "metadata": extract_metadata(lattice.metadata), - }, - "graph": extract_graph(result_obj.lattice.transport_graph._graph), - } - - jsonified_result = simplejson.dumps(result_dict, default=result_encoder, ignore_nan=True) - - return jsonified_result - - class DispatchDB: """ Wrapper for the database of workflows. 
diff --git a/covalent_dispatcher/_db/models.py b/covalent_dispatcher/_db/models.py index 7e0521c35..e61f725ef 100644 --- a/covalent_dispatcher/_db/models.py +++ b/covalent_dispatcher/_db/models.py @@ -92,10 +92,10 @@ class Lattice(Base): # Name of the file containing the serialized input data inputs_filename = Column(Text) - # Name of the file containing the serialized named args + # DEPRECATED: Name of the file containing the serialized named args named_args_filename = Column(Text) - # Name of the file containing the serialized named kwargs + # DEPRECATED: Name of the file containing the serialized named kwargs named_kwargs_filename = Column(Text) # name of the file containing the serialized output @@ -104,10 +104,10 @@ class Lattice(Base): # Name of the file containing the default electron hooks hooks_filename = Column(Text) - # Name of the file containing the set of cova imports + # DEPRECATED: Name of the file containing the set of cova imports cova_imports_filename = Column(Text) - # Name of the file containing the set of lattice imports + # DEPRECATED: Name of the file containing the set of lattice imports lattice_imports_filename = Column(Text) # Results directory (will be deprecated soon) diff --git a/covalent_dispatcher/_db/upsert.py b/covalent_dispatcher/_db/upsert.py index 3bd7f0ca7..70ef99a45 100644 --- a/covalent_dispatcher/_db/upsert.py +++ b/covalent_dispatcher/_db/upsert.py @@ -57,12 +57,8 @@ LATTICE_DOCSTRING_FILENAME = LATTICE_FILENAMES["doc"] LATTICE_ERROR_FILENAME = LATTICE_FILENAMES["error"] LATTICE_INPUTS_FILENAME = LATTICE_FILENAMES["inputs"] -LATTICE_NAMED_ARGS_FILENAME = LATTICE_FILENAMES["named_args"] -LATTICE_NAMED_KWARGS_FILENAME = LATTICE_FILENAMES["named_kwargs"] LATTICE_RESULTS_FILENAME = LATTICE_FILENAMES["result"] LATTICE_HOOKS_FILENAME = LATTICE_FILENAMES["hooks"] -LATTICE_COVA_IMPORTS_FILENAME = LATTICE_FILENAMES["cova_imports"] -LATTICE_LATTICE_IMPORTS_FILENAME = LATTICE_FILENAMES["lattice_imports"] LATTICE_STORAGE_TYPE = "file" CUSTOM_ASSETS_FIELD = "custom_asset_keys" @@ -108,12 +104,8 @@ def _lattice_data(session: Session, result: Result, electron_id: int = None) -> ("doc", LATTICE_DOCSTRING_FILENAME, result.lattice.__doc__), ("error", LATTICE_ERROR_FILENAME, result.error), ("inputs", LATTICE_INPUTS_FILENAME, result.lattice.inputs), - ("named_args", LATTICE_NAMED_ARGS_FILENAME, result.lattice.named_args), - ("named_kwargs", LATTICE_NAMED_KWARGS_FILENAME, result.lattice.named_kwargs), ("result", LATTICE_RESULTS_FILENAME, result._result), ("hooks", LATTICE_HOOKS_FILENAME, result.lattice.metadata["hooks"]), - ("cova_imports", LATTICE_COVA_IMPORTS_FILENAME, result.lattice.cova_imports), - ("lattice_imports", LATTICE_LATTICE_IMPORTS_FILENAME, result.lattice.lattice_imports), ]: digest, size = local_store.store_file(data_storage_path, filename, data) asset_record_kwargs = { @@ -161,12 +153,8 @@ def _lattice_data(session: Session, result: Result, electron_id: int = None) -> "workflow_executor_data": json.dumps(result.lattice.metadata["workflow_executor_data"]), "error_filename": LATTICE_ERROR_FILENAME, "inputs_filename": LATTICE_INPUTS_FILENAME, - "named_args_filename": LATTICE_NAMED_ARGS_FILENAME, - "named_kwargs_filename": LATTICE_NAMED_KWARGS_FILENAME, "results_filename": LATTICE_RESULTS_FILENAME, "hooks_filename": LATTICE_HOOKS_FILENAME, - "cova_imports_filename": LATTICE_COVA_IMPORTS_FILENAME, - "lattice_imports_filename": LATTICE_LATTICE_IMPORTS_FILENAME, "results_dir": results_dir, "root_dispatch_id": result.root_dispatch_id, "python_version": 
result.lattice.python_version, diff --git a/covalent_dispatcher/_db/write_result_to_db.py b/covalent_dispatcher/_db/write_result_to_db.py index 9d928c1ec..08da952ca 100644 --- a/covalent_dispatcher/_db/write_result_to_db.py +++ b/covalent_dispatcher/_db/write_result_to_db.py @@ -95,12 +95,8 @@ def transaction_insert_lattices_data( workflow_executor_data: str, error_filename: str, inputs_filename: str, - named_args_filename: str, - named_kwargs_filename: str, results_filename: str, hooks_filename: str, - cova_imports_filename: str, - lattice_imports_filename: str, results_dir: str, root_dispatch_id: str, created_at: dt, @@ -133,12 +129,8 @@ def transaction_insert_lattices_data( workflow_executor_data=workflow_executor_data, error_filename=error_filename, inputs_filename=inputs_filename, - named_args_filename=named_args_filename, - named_kwargs_filename=named_kwargs_filename, results_filename=results_filename, hooks_filename=hooks_filename, - cova_imports_filename=cova_imports_filename, - lattice_imports_filename=lattice_imports_filename, results_dir=results_dir, root_dispatch_id=root_dispatch_id, is_active=True, diff --git a/covalent_dispatcher/_service/app.py b/covalent_dispatcher/_service/app.py index 9a9c7d460..03e71186d 100644 --- a/covalent_dispatcher/_service/app.py +++ b/covalent_dispatcher/_service/app.py @@ -191,7 +191,7 @@ async def register(manifest: ResultSchema) -> ResultSchema: ) from e -@router.post("/dispatches/{dispatch_id}/subdispatches", status_code=201) +@router.post("/dispatches/{dispatch_id}/sublattices", status_code=201) async def register_subdispatch( manifest: ResultSchema, dispatch_id: str, diff --git a/covalent_dispatcher/_service/models.py b/covalent_dispatcher/_service/models.py index 2d2f7db10..18a33a071 100644 --- a/covalent_dispatcher/_service/models.py +++ b/covalent_dispatcher/_service/models.py @@ -41,11 +41,7 @@ class LatticeAssetKey(str, Enum): workflow_function_string = "workflow_function_string" doc = "doc" inputs = "inputs" - named_args = "named_args" - named_kwargs = "named_kwargs" hooks = "hooks" - cova_imports = "cova_imports" - lattice_imports = "lattice_imports" class ElectronAssetKey(str, Enum): diff --git a/covalent_ui/app.py b/covalent_ui/app.py index bf1d473eb..aa09d8854 100644 --- a/covalent_ui/app.py +++ b/covalent_ui/app.py @@ -133,7 +133,6 @@ def get_home(request: Request, rest_of_path: str): app_name, host=host, port=port, - debug=DEBUG, reload=RELOAD, log_config=log_config(), ) diff --git a/covalent_ui/result_webhook.py b/covalent_ui/result_webhook.py index 3caf03c10..f5d311421 100644 --- a/covalent_ui/result_webhook.py +++ b/covalent_ui/result_webhook.py @@ -22,7 +22,7 @@ import covalent_ui.app as ui_server from covalent._results_manager import Result from covalent._shared_files import logger -from covalent._shared_files.utils import get_ui_url +from covalent._shared_files.utils import get_named_params, get_ui_url from covalent_dispatcher._db.dispatchdb import encode_dict, extract_graph, extract_metadata app_log = logger.app_log @@ -78,8 +78,11 @@ def send_draw_request(lattice) -> None: graph = lattice.transport_graph.get_internal_graph_copy() - named_args = lattice.named_args.get_deserialized() - named_kwargs = lattice.named_kwargs.get_deserialized() + inputs = lattice.inputs.get_deserialized() + fn = lattice.workflow_function.get_deserialized() + args = inputs["args"] + kwargs = inputs["kwargs"] + named_args, named_kwargs = get_named_params(fn, args, kwargs) draw_request = json.dumps( { diff --git a/covalent_ui/webapp/yarn.lock 
b/covalent_ui/webapp/yarn.lock index 9bd5de6ca..30878bc9f 100644 --- a/covalent_ui/webapp/yarn.lock +++ b/covalent_ui/webapp/yarn.lock @@ -3259,21 +3259,23 @@ bn.js@^5.2.1: resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.1.tgz#0bc527a6a0d18d0aa8d5b0538ce4a77dccfa7b70" integrity sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ== -body-parser@1.19.2: - version "1.19.2" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.2.tgz#4714ccd9c157d44797b8b5607d72c0b89952f26e" - integrity sha512-SAAwOxgoCKMGs9uUAUFHygfLAyaniaoun6I8mFY9pRAJL9+Kec34aU+oIjDhTycub1jozEfEwx1W1IuOYxVSFw== +body-parser@1.20.2: + version "1.20.2" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd" + integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA== dependencies: bytes "3.1.2" - content-type "~1.0.4" + content-type "~1.0.5" debug "2.6.9" - depd "~1.1.2" - http-errors "1.8.1" + depd "2.0.0" + destroy "1.2.0" + http-errors "2.0.0" iconv-lite "0.4.24" - on-finished "~2.3.0" - qs "6.9.7" - raw-body "2.4.3" + on-finished "2.4.1" + qs "6.11.0" + raw-body "2.5.2" type-is "~1.6.18" + unpipe "1.0.0" bonjour@^3.5.0: version "3.5.0" @@ -3958,6 +3960,11 @@ content-type@~1.0.4: resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== +content-type@~1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918" + integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== + convert-source-map@1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" @@ -3982,10 +3989,10 @@ cookie-signature@1.0.6: resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= -cookie@0.4.2: - version "0.4.2" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.2.tgz#0e41f24de5ecf317947c82fc789e06a884824432" - integrity sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA== +cookie@0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051" + integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw== copy-concurrently@^1.0.0: version "1.0.5" @@ -4613,6 +4620,11 @@ delayed-stream@~1.0.0: resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= +depd@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df" + integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw== + depd@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" @@ -4626,10 +4638,10 @@ des.js@^1.0.0: inherits "^2.0.1" minimalistic-assert "^1.0.0" -destroy@~1.0.4: - version "1.0.4" - resolved 
"https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" - integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= +destroy@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015" + integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg== detect-newline@^3.0.0: version "3.1.0" @@ -5013,13 +5025,14 @@ es-to-primitive@^1.2.1: is-date-object "^1.0.1" is-symbol "^1.0.2" -es5-ext@^0.10.35, es5-ext@^0.10.50: - version "0.10.60" - resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.60.tgz#e8060a86472842b93019c31c34865012449883f4" - integrity sha512-jpKNXIt60htYG59/9FGf2PYT3pwMpnEbNKysU+k/4FGwyGtMotOvcZOuW+EmXXYASRqYSXQfGL5cVIthOTgbkg== +es5-ext@^0.10.35, es5-ext@^0.10.50, es5-ext@^0.10.62, es5-ext@~0.10.14: + version "0.10.63" + resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.63.tgz#9c222a63b6a332ac80b1e373b426af723b895bd6" + integrity sha512-hUCZd2Byj/mNKjfP9jXrdVZ62B8KuA/VoK7X8nUh5qT+AxDmcbvZz041oDVZdbIN1qW6XY9VDNwzkvKnZvK2TQ== dependencies: es6-iterator "^2.0.3" es6-symbol "^3.1.3" + esniff "^2.0.1" next-tick "^1.1.0" es6-iterator@2.0.3, es6-iterator@^2.0.3: @@ -5281,6 +5294,16 @@ eslint@^7.11.0: text-table "^0.2.0" v8-compile-cache "^2.0.3" +esniff@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/esniff/-/esniff-2.0.1.tgz#a4d4b43a5c71c7ec51c51098c1d8a29081f9b308" + integrity sha512-kTUIGKQ/mDPFoJ0oVfcmyJn4iBDRptjNVIzwIFR7tqWXdVI9xfA2RMwY/gbSpJG3lkdWNEjLap/NqVHZiJsdfg== + dependencies: + d "^1.0.1" + es5-ext "^0.10.62" + event-emitter "^0.3.5" + type "^2.7.2" + espree@^7.3.0, espree@^7.3.1: version "7.3.1" resolved "https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6" @@ -5339,6 +5362,14 @@ etag@~1.8.1: resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= +event-emitter@^0.3.5: + version "0.3.5" + resolved "https://registry.yarnpkg.com/event-emitter/-/event-emitter-0.3.5.tgz#df8c69eef1647923c7157b9ce83840610b02cc39" + integrity sha512-D9rRn9y7kLPnJ+hMq7S/nhvoKwwvVJahBi2BPmx3bvbsEdK3W9ii8cBSGjP+72/LnM4n6fo3+dkCX5FeTQruXA== + dependencies: + d "1" + es5-ext "~0.10.14" + eventemitter3@^4.0.0: version "4.0.7" resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" @@ -5426,37 +5457,38 @@ expect@^26.6.0, expect@^26.6.2: jest-regex-util "^26.0.0" express@^4.17.1: - version "4.17.3" - resolved "https://registry.yarnpkg.com/express/-/express-4.17.3.tgz#f6c7302194a4fb54271b73a1fe7a06478c8f85a1" - integrity sha512-yuSQpz5I+Ch7gFrPCk4/c+dIBKlQUxtgwqzph132bsT6qhuzss6I8cLJQz7B3rFblzd6wtcI0ZbGltH/C4LjUg== + version "4.19.2" + resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465" + integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q== dependencies: accepts "~1.3.8" array-flatten "1.1.1" - body-parser "1.19.2" + body-parser "1.20.2" content-disposition "0.5.4" content-type "~1.0.4" - cookie "0.4.2" + cookie "0.6.0" cookie-signature "1.0.6" debug "2.6.9" - depd "~1.1.2" + depd "2.0.0" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" - finalhandler "~1.1.2" + finalhandler "1.2.0" fresh "0.5.2" + http-errors "2.0.0" merge-descriptors "1.0.1" methods "~1.1.2" - on-finished "~2.3.0" + on-finished "2.4.1" 
parseurl "~1.3.3" path-to-regexp "0.1.7" proxy-addr "~2.0.7" - qs "6.9.7" + qs "6.11.0" range-parser "~1.2.1" safe-buffer "5.2.1" - send "0.17.2" - serve-static "1.14.2" + send "0.18.0" + serve-static "1.15.0" setprototypeof "1.2.0" - statuses "~1.5.0" + statuses "2.0.1" type-is "~1.6.18" utils-merge "1.0.1" vary "~1.1.2" @@ -5598,17 +5630,17 @@ fill-range@^7.0.1: dependencies: to-regex-range "^5.0.1" -finalhandler@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" - integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== +finalhandler@1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32" + integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg== dependencies: debug "2.6.9" encodeurl "~1.0.2" escape-html "~1.0.3" - on-finished "~2.3.0" + on-finished "2.4.1" parseurl "~1.3.3" - statuses "~1.5.0" + statuses "2.0.1" unpipe "~1.0.0" find-cache-dir@^2.1.0: @@ -5683,9 +5715,9 @@ flush-write-stream@^1.0.0: readable-stream "^2.3.6" follow-redirects@^1.0.0, follow-redirects@^1.15.0: - version "1.15.3" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.3.tgz#fe2f3ef2690afce7e82ed0b44db08165b207123a" - integrity sha512-1VzOtuEM8pC9SFU1E+8KfTjZyMztRsgEfwQl44z8A25uy13jSzTj6dyK2Df52iV0vgHCfBwLhDWevLn95w5v6Q== + version "1.15.6" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b" + integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA== for-in@^1.0.2: version "1.0.2" @@ -6263,15 +6295,15 @@ http-deceiver@^1.2.7: resolved "https://registry.yarnpkg.com/http-deceiver/-/http-deceiver-1.2.7.tgz#fa7168944ab9a519d337cb0bec7284dc3e723d87" integrity sha1-+nFolEq5pRnTN8sL7HKE3D5yPYc= -http-errors@1.8.1: - version "1.8.1" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.8.1.tgz#7c3f28577cbc8a207388455dbd62295ed07bd68c" - integrity sha512-Kpk9Sm7NmI+RHhnj6OIWDI1d6fIoFAtFt9RLaTMRlg/8w49juAStsrBgp0Dp4OdxdVbRIeKhtCUvoi/RuAhO4g== +http-errors@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" + integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== dependencies: - depd "~1.1.2" + depd "2.0.0" inherits "2.0.4" setprototypeof "1.2.0" - statuses ">= 1.5.0 < 2" + statuses "2.0.1" toidentifier "1.0.1" http-errors@~1.6.2: @@ -6515,9 +6547,9 @@ ip-regex@^2.1.0: integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= ip@^1.1.0, ip@^1.1.5: - version "1.1.5" - resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a" - integrity sha1-vd7XARQpCCjAoDnnLvJfWq7ENUo= + version "1.1.9" + resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.9.tgz#8dfbcc99a754d07f425310b86a99546b1151e396" + integrity sha512-cyRxvOEpNHNtchU3Ln9KC/auJgup87llfQpQ+t5ghoC/UhL16SWzbueiCsdTnWmqAWl7LadfuwhlqmtOaqMHdQ== ipaddr.js@1.9.1, ipaddr.js@^1.9.0: version "1.9.1" @@ -8057,6 +8089,11 @@ minipass@^3.0.0, minipass@^3.1.1: dependencies: yallist "^4.0.0" +minipass@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-5.0.0.tgz#3e9788ffb90b694a5d0ec94479a45b5d8738133d" + integrity 
sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ== + minizlib@^2.1.1: version "2.1.2" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931" @@ -8454,10 +8491,10 @@ obuf@^1.0.0, obuf@^1.1.2: resolved "https://registry.yarnpkg.com/obuf/-/obuf-1.1.2.tgz#09bea3343d41859ebd446292d11c9d4db619084e" integrity sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg== -on-finished@~2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" - integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= +on-finished@2.4.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f" + integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg== dependencies: ee-first "1.1.1" @@ -9733,10 +9770,12 @@ q@^1.1.2: resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7" integrity sha1-fjL3W0E4EpHQRhHxvxQQmsAGUdc= -qs@6.9.7: - version "6.9.7" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.9.7.tgz#4610846871485e1e048f44ae3b94033f0e675afe" - integrity sha512-IhMFgUmuNpyRfxA90umL7ByLlgRXu6tIfKPpF5TmcfRLlLCckfP/g3IQmju6jjpu+Hh8rA+2p6A27ZSPOOHdKw== +qs@6.11.0: + version "6.11.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" + integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== + dependencies: + side-channel "^1.0.4" query-string@^4.1.0: version "4.3.4" @@ -9798,13 +9837,13 @@ range-parser@^1.2.1, range-parser@~1.2.1: resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== -raw-body@2.4.3: - version "2.4.3" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.3.tgz#8f80305d11c2a0a545c2d9d89d7a0286fcead43c" - integrity sha512-UlTNLIcu0uzb4D2f4WltY6cVjLi+/jEN4lgEUj3E04tpMDpUlkBo/eSn6zou9hum2VMNpCCUone0O0WeJim07g== +raw-body@2.5.2: + version "2.5.2" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a" + integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA== dependencies: bytes "3.1.2" - http-errors "1.8.1" + http-errors "2.0.0" iconv-lite "0.4.24" unpipe "1.0.0" @@ -10659,24 +10698,24 @@ semver@^7.2.1, semver@^7.3.2, semver@^7.3.5: dependencies: lru-cache "^6.0.0" -send@0.17.2: - version "0.17.2" - resolved "https://registry.yarnpkg.com/send/-/send-0.17.2.tgz#926622f76601c41808012c8bf1688fe3906f7820" - integrity sha512-UJYB6wFSJE3G00nEivR5rgWp8c2xXvJ3OPWPhmuteU0IKj8nKbG3DrjiOmLwpnHGYWAVwA69zmTm++YG0Hmwww== +send@0.18.0: + version "0.18.0" + resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be" + integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg== dependencies: debug "2.6.9" - depd "~1.1.2" - destroy "~1.0.4" + depd "2.0.0" + destroy "1.2.0" encodeurl "~1.0.2" escape-html "~1.0.3" etag "~1.8.1" fresh "0.5.2" - http-errors "1.8.1" + http-errors "2.0.0" mime "1.6.0" ms "2.1.3" - on-finished "~2.3.0" + on-finished "2.4.1" range-parser "~1.2.1" - statuses 
"~1.5.0" + statuses "2.0.1" serialize-javascript@^4.0.0: version "4.0.0" @@ -10705,15 +10744,15 @@ serve-index@^1.9.1: mime-types "~2.1.17" parseurl "~1.3.2" -serve-static@1.14.2: - version "1.14.2" - resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.2.tgz#722d6294b1d62626d41b43a013ece4598d292bfa" - integrity sha512-+TMNA9AFxUEGuC0z2mevogSnn9MXKb4fa7ngeRMJaaGv8vTwnIEkKi+QGvPt33HSnf8pRS+WGM0EbMtCJLKMBQ== +serve-static@1.15.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540" + integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g== dependencies: encodeurl "~1.0.2" escape-html "~1.0.3" parseurl "~1.3.3" - send "0.17.2" + send "0.18.0" set-blocking@^2.0.0: version "2.0.0" @@ -11076,7 +11115,12 @@ static-extend@^0.1.1: define-property "^0.2.5" object-copy "^0.1.0" -"statuses@>= 1.4.0 < 2", "statuses@>= 1.5.0 < 2", statuses@~1.5.0: +statuses@2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63" + integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ== + +"statuses@>= 1.4.0 < 2": version "1.5.0" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= @@ -11379,13 +11423,13 @@ tapable@^1.0.0, tapable@^1.1.3: integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA== tar@^6.0.2: - version "6.1.11" - resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.11.tgz#6760a38f003afa1b2ffd0ffe9e9abbd0eab3d621" - integrity sha512-an/KZQzQUkZCkuoAA64hM92X0Urb6VpRhAFllDzz44U2mcD5scmT3zBc4VgVpkugF580+DQn8eAFSyoQt0tznA== + version "6.2.1" + resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a" + integrity sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A== dependencies: chownr "^2.0.0" fs-minipass "^2.0.0" - minipass "^3.0.0" + minipass "^5.0.0" minizlib "^2.1.1" mkdirp "^1.0.3" yallist "^4.0.0" @@ -11690,6 +11734,11 @@ type@^2.5.0: resolved "https://registry.yarnpkg.com/type/-/type-2.6.0.tgz#3ca6099af5981d36ca86b78442973694278a219f" integrity sha512-eiDBDOmkih5pMbo9OqsqPRGMljLodLcwd5XD5JbtNB0o89xZAwynY9EdCDsJU7LtcVCClu9DvM7/0Ep1hYX3EQ== +type@^2.7.2: + version "2.7.2" + resolved "https://registry.yarnpkg.com/type/-/type-2.7.2.tgz#2376a15a3a28b1efa0f5350dcf72d24df6ef98d0" + integrity sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw== + typedarray-to-buffer@^3.1.5: version "3.1.5" resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" diff --git a/doc/source/api/cli.rst b/doc/source/api/cli.rst index 1dd83f117..22b5c1fea 100644 --- a/doc/source/api/cli.rst +++ b/doc/source/api/cli.rst @@ -5,5 +5,5 @@ The command line interface (CLI) tool is used to manage the Covalent server. .. 
click:: covalent_dispatcher._cli.cli:cli :prog: covalent - :commands: start,stop,restart,status,purge,logs,db,migrate-legacy-result-object,cluster + :commands: start,stop,restart,status,purge,logs,db,cluster :nested: full diff --git a/doc/source/api/executors/slurm.rst b/doc/source/api/executors/slurm.rst index e9df75496..1412cd7d8 100644 --- a/doc/source/api/executors/slurm.rst +++ b/doc/source/api/executors/slurm.rst @@ -133,27 +133,6 @@ Here the corresponding submit script contains the following commands: srun --ntasks-per-node 1 dcgmi profile --resume +.. note:: -sshproxy --------- - -Some users may need two-factor authentication (2FA) to connect to a cluster. This plugin supports one form of 2FA using the `sshproxy `_ service developed by NERSC. When this plugin is configured to support ``sshproxy``, the user's SSH key and certificate will be refreshed automatically by Covalent if either it does not exist or it is expired. We assume that the user has already `configured 2FA `_, used the ``sshproxy`` service on the command line without issue, and added the executable to their ``PATH``. Note that this plugin assumes the script is called ``sshproxy``, not ``sshproxy.sh``. Further note that using ``sshproxy`` within Covalent is not required; a user can still run it manually and provide ``ssh_key_file`` and ``cert_file`` in the plugin constructor. - -In order to enable ``sshproxy`` in this plugin, add the following block to your Covalent configuration while the server is stopped: - -.. code:: bash - - [executors.slurm.sshproxy] - hosts = [ "perlmutter-p1.nersc.gov" ] - password = "" - secret = "" - -For details on how to modify your Covalent configuration, refer to the documentation `here `_. - -Then, reinstall this plugin using ``pip install covalent-slurm-plugin[sshproxy]`` in order to pull in the ``oathtool`` package which will generate one-time passwords. - -The ``hosts`` parameter is a list of hostnames for which the ``sshproxy`` service will be used. If the address provided in the plugin constructor is not present in this list, ``sshproxy`` will not be used. The ``password`` is the user's password, not including the 6-digit OTP. The ``secret`` is the 2FA secret provided when a user registers a new device on `Iris `_. Rather than scan the QR code into an authenticator app, inspect the Oath Seed URL for a string labeled ``secret=...``, typically consisting of numbers and capital letters. Users can validate that correct OTP codes are being generated by using the command ``oathtool `` and using the 6-digit number returned in the "Test" option on the Iris 2FA page. Note that these values are stored in plaintext in the Covalent configuration file. If a user suspects credentials have been stolen or compromised, contact your systems administrator immediately to report the incident and request deactivation. - -.. autoclass:: covalent_slurm_plugin.SlurmExecutor - :members: - :inherited-members: +Each electron that uses the Slurm executor opens a separate SSH connection to the remote system. When executing 10 or more concurrent electrons, be mindful of client and/or server-side limitations on the total number of SSH connections. 
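To make the new note's connection-count concern concrete, here is a minimal sketch of the fan-out pattern it warns about. The host, username, key path, and working directory are hypothetical placeholders, and the `SlurmExecutor` arguments shown are illustrative rather than a recommended configuration:

```python
# Hypothetical fan-out over a Slurm cluster; address/username/ssh_key_file
# are placeholders. Each running electron holds its own SSH connection.
import covalent as ct
from covalent_slurm_plugin import SlurmExecutor

slurm = SlurmExecutor(
    address="cluster.example.com",
    username="jdoe",
    ssh_key_file="~/.ssh/id_rsa",
    remote_workdir="/scratch/jdoe/covalent",
)

@ct.electron(executor=slurm)
def task(i: int) -> int:
    return i * i

@ct.lattice
def fan_out(n: int) -> list:
    # n concurrent electrons -> up to n simultaneous SSH sessions, which can
    # run into sshd limits (e.g. MaxSessions/MaxStartups) on the login node
    # once n reaches roughly 10 or more.
    return [task(i) for i in range(n)]

dispatch_id = ct.dispatch(fan_out)(20)
```

If the limit does bite, batching several work items into a single electron keeps the number of simultaneous SSH sessions bounded.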
diff --git a/doc/source/tutorials/0_ClassicalMachineLearning/genai/requirements.txt b/doc/source/tutorials/0_ClassicalMachineLearning/genai/requirements.txt
index d036f6ae7..2843addc7 100644
--- a/doc/source/tutorials/0_ClassicalMachineLearning/genai/requirements.txt
+++ b/doc/source/tutorials/0_ClassicalMachineLearning/genai/requirements.txt
@@ -3,9 +3,9 @@ bs4==0.0.1
 covalent-azurebatch-plugin==0.12.0
 diffusers==0.19.3
 emoji==2.8.0
-Pillow==9.5.0
+Pillow==10.3.0
 sentencepiece==0.1.99
-streamlit==1.25.0
+streamlit==1.30.0
 torch==2.0.1
-transformers==4.31.0
+transformers==4.36.0
 xformers==0.0.21
diff --git a/doc/source/tutorials/voice_cloning/requirements.txt b/doc/source/tutorials/voice_cloning/requirements.txt
index 534f3b8e0..0ef381da8 100644
--- a/doc/source/tutorials/voice_cloning/requirements.txt
+++ b/doc/source/tutorials/voice_cloning/requirements.txt
@@ -5,8 +5,8 @@ pydub==0.25.1
 pytube==15.0.0
 scipy==1.11.3
 soundfile==0.12.1
-streamlit==1.28.1
+streamlit==1.30.0
 torch==2.1.0
 torchaudio==2.1.0
-transformers==4.33.3
+transformers==4.38.0
 TTS==0.19.1
diff --git a/doc/source/version_migrations/index.rst b/doc/source/version_migrations/index.rst
index 48995ae73..a93a38901 100644
--- a/doc/source/version_migrations/index.rst
+++ b/doc/source/version_migrations/index.rst
@@ -52,20 +52,4 @@ If you are using Covalent v0.110.2 or later you can upgrade to Covalent v0.177.0

       $ covalent start
       Covalent server has started at http://localhost:48008
-
-6. Use the data migration tool to migrate any workflows you want to port to the new version of Covalent.
-
-   For example, for a workflow with dispatch ID :code:`e0ba03a2-fdc0-474e-9997-7fa8e82932c5`:
-
-   .. code:: bash
-
-      $ covalent migrate-legacy-result-object ./results/e0ba03a2-fdc0-474e-9997-7fa8e82932c5/result.pkl
-      Processing result object for dispatch e0ba03a2-fdc0-474e-9997-7fa8e82932c5
-      Processing node 0
-      Processing node 1
-      Processing node 2
-      Processing node 3
-      Processed transport graph
-      Processed lattice
-
-7. Navigate to the UI (http://localhost:48008) to view your workflows.
+6. Navigate to the UI (http://localhost:48008) to view your workflows.
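One side note on the tutorial pins above: the Pillow bump crosses a major-version boundary (9.5 to 10.3), and Pillow 10 removed the long-deprecated top-level resampling constants. If any of the tutorial code happens to resize images with the old constants, it would need the enum form; a generic compatibility sketch, not code taken from these tutorials:

```python
# Generic Pillow 9 -> 10 migration sketch (illustrative only).
from PIL import Image

img = Image.new("RGB", (512, 512))

# Pillow 9 and earlier (Image.ANTIALIAS was removed in Pillow 10):
#   resized = img.resize((256, 256), Image.ANTIALIAS)
resized = img.resize((256, 256), Image.Resampling.LANCZOS)
```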
diff --git a/requirements-client.txt b/requirements-client.txt
index ede6a20e3..1d74ffb04 100644
--- a/requirements-client.txt
+++ b/requirements-client.txt
@@ -7,6 +7,5 @@ furl>=2.1.3
 networkx>=2.8.6
 pydantic>=2.1.1
 requests>=2.24.0
-simplejson>=3.17.6
 toml>=0.10.2
 watchdog>=2.0.3
diff --git a/requirements.txt b/requirements.txt
index f9d0fa34c..15ecc9091 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -16,11 +16,9 @@ python-multipart>=0.0.6
 python-socketio>=5.7.1
 requests>=2.24.0
 rich>=12.0.0,<=13.3.5
-simplejson>=3.17.6
 sqlalchemy>=1.4.37,<2.0.0
 sqlalchemy_utils>=0.38.3
 toml>=0.10.2
 typing-extensions>=4.8.0
-uvicorn[standard]==0.18.3
+uvicorn[standard]
 watchdog>=2.2.1
-werkzeug>=2.0.3
diff --git a/setup.py b/setup.py
index 317a469c7..6762fdf6d 100644
--- a/setup.py
+++ b/setup.py
@@ -28,6 +28,9 @@
 with open("VERSION") as f:
     version = f.read().strip()

+# Allow installing a particular commit for testing
+commit_sha = os.getenv("COVALENT_COMMIT_SHA")
+artifact_id = commit_sha if commit_sha else f"v{version}"

 requirements_file = "requirements.txt"
 exclude_modules = [
@@ -202,7 +205,7 @@ def find_sources(self):
     "version": version,
     "maintainer": "Agnostiq",
     "url": "https://github.com/AgnostiqHQ/covalent",
-    "download_url": f"https://github.com/AgnostiqHQ/covalent/archive/v{version}.tar.gz",
+    "download_url": f"https://github.com/AgnostiqHQ/covalent/archive/{artifact_id}.tar.gz",
     "license": "Apache License 2.0",
     "author": "Agnostiq",
     "author_email": "support@agnostiq.ai",
diff --git a/tests/covalent_dispatcher_tests/_cli/cli_test.py b/tests/covalent_dispatcher_tests/_cli/cli_test.py
index a50b083da..aac119712 100644
--- a/tests/covalent_dispatcher_tests/_cli/cli_test.py
+++ b/tests/covalent_dispatcher_tests/_cli/cli_test.py
@@ -61,7 +61,6 @@ def test_cli_commands():
         "db",
         "deploy",
         "logs",
-        "migrate-legacy-result-object",
         "purge",
         "restart",
         "start",
diff --git a/tests/covalent_dispatcher_tests/_cli/migrate_test.py b/tests/covalent_dispatcher_tests/_cli/migrate_test.py
deleted file mode 100644
index 18289bcca..000000000
--- a/tests/covalent_dispatcher_tests/_cli/migrate_test.py
+++ /dev/null
@@ -1,249 +0,0 @@
-# Copyright 2021 Agnostiq Inc.
-#
-# This file is part of Covalent.
-#
-# Licensed under the Apache License 2.0 (the "License"). A copy of the
-# License may be obtained with this software package or at
-#
-#    https://www.apache.org/licenses/LICENSE-2.0
-#
-# Use of this file is prohibited except in compliance with the License.
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
- -"""Testing results_dir migration script""" - -import pickle -from pathlib import Path - -from covalent._results_manager import Result -from covalent._shared_files.defaults import attr_prefix, generator_prefix, subscript_prefix -from covalent._workflow.transport import TransportableObject, _TransportGraph -from covalent_dispatcher._cli.migrate import ( - migrate_pickled_result_object, - process_lattice, - process_node, - process_result_object, - process_transport_graph, - to_decoded_electron_collection, -) - -dispatch_id = "652dc473-fa37-4846-85f3-b314204fd432" -sub_dispatch_id = "c333d0b3-8711-4595-9374-421f5482a592" - -basedir = Path(__file__).parent -sample_results_dir = basedir / Path("sample_results_dir") -result_pkl = sample_results_dir / dispatch_id / "result.pkl" - -# task node 0, parameter node 1 -# attribute node 2 -# sublattice node 3 -# task node 4, generator nodes 5, 6 -# subscript node 7 - - -def get_sample_result_object(): - with open(result_pkl, "rb") as f: - result_object = pickle.load(f) - return result_object - - -def compare_nodes_and_edges(tg_orig: _TransportGraph, tg_new: _TransportGraph): - """Convenience function for comparing a legacy transport graph with a processed one.""" - - # Check metadata - for n in tg_new._graph.nodes: - metadata = tg_new._graph.nodes[n]["metadata"] - assert "deps" in metadata - assert "call_before" in metadata - assert "call_after" in metadata - - # Check other node attributes - task_node = tg_new._graph.nodes[0] - orig_output = tg_orig._graph.nodes[0]["output"] - - assert isinstance(task_node["output"], TransportableObject) - assert task_node["output"].get_deserialized().__dict__ == orig_output.__dict__ - - collection_node = tg_new._graph.nodes[1] - assert ( - collection_node["function"].get_serialized() - == TransportableObject(to_decoded_electron_collection).get_serialized() - ) - - param_node = tg_new._graph.nodes[2] - orig_output = tg_orig._graph.nodes[2]["output"] - orig_value = tg_orig._graph.nodes[2]["value"] - - assert isinstance(param_node["output"], TransportableObject) - assert isinstance(param_node["value"], TransportableObject) - assert param_node["output"].get_deserialized() == orig_output - - param_node = tg_new._graph.nodes[3] - orig_output = tg_orig._graph.nodes[3]["output"] - orig_value = tg_orig._graph.nodes[3]["value"] - - assert isinstance(param_node["output"], TransportableObject) - assert isinstance(param_node["value"], TransportableObject) - assert param_node["output"].get_deserialized() == orig_output - - attr_node = tg_new._graph.nodes[4] - orig_output = tg_orig._graph.nodes[4]["output"] - - assert isinstance(attr_node["output"], TransportableObject) - assert attr_node["output"].get_deserialized() == orig_output - assert "attribute_name" not in attr_node - assert attr_prefix not in attr_node["name"] - - subl_node = tg_new._graph.nodes[5] - orig_output = tg_orig._graph.nodes[5]["output"] - - assert isinstance(subl_node["output"], TransportableObject) - assert isinstance(subl_node["sublattice_result"], Result) - assert subl_node["output"].get_deserialized() == orig_output - - task_node = tg_new._graph.nodes[6] - orig_output = tg_orig._graph.nodes[6]["output"] - - assert isinstance(task_node["output"], TransportableObject) - assert task_node["output"].get_deserialized() == orig_output - - gen_node = tg_new._graph.nodes[7] - orig_output = tg_orig._graph.nodes[7]["output"] - - assert isinstance(gen_node["output"], TransportableObject) - assert gen_node["output"].get_deserialized() == orig_output - assert "key" not 
in gen_node - assert generator_prefix not in gen_node["name"] - - gen_node = tg_new._graph.nodes[8] - orig_output = tg_orig._graph.nodes[8]["output"] - - assert isinstance(gen_node["output"], TransportableObject) - assert gen_node["output"].get_deserialized() == orig_output - assert "key" not in gen_node - assert generator_prefix not in gen_node["name"] - - subscript_node = tg_new._graph.nodes[9] - orig_output = tg_orig._graph.nodes[9]["output"] - - assert isinstance(subscript_node["output"], TransportableObject) - assert subscript_node["output"].get_deserialized() == orig_output - assert "key" not in subscript_node - assert subscript_prefix not in subscript_node["name"] - - assert tg_orig._graph.edges == tg_new._graph.edges - - -def test_process_legacy_node(): - """Test process_node""" - - ro = get_sample_result_object() - ro_orig = get_sample_result_object() - tg = ro.lattice.transport_graph - tg_orig = ro_orig.lattice.transport_graph - - task_node = tg._graph.nodes[0] - orig_output = tg_orig._graph.nodes[0]["output"] - process_node(task_node) - - param_node = tg._graph.nodes[2] - orig_output = tg_orig._graph.nodes[2]["output"] - orig_value = tg_orig._graph.nodes[2]["value"] - process_node(param_node) - - param_node = tg._graph.nodes[3] - orig_output = tg_orig._graph.nodes[3]["output"] - orig_value = tg_orig._graph.nodes[3]["value"] - process_node(param_node) - - attr_node = tg._graph.nodes[4] - orig_output = tg_orig._graph.nodes[4]["output"] - assert "attribute_name" in attr_node - assert attr_prefix in attr_node["name"] - process_node(attr_node) - - subl_node = tg._graph.nodes[5] - orig_output = tg_orig._graph.nodes[5]["output"] - assert "sublattice_result" in subl_node - process_node(subl_node) - - task_node = tg._graph.nodes[6] - orig_output = tg_orig._graph.nodes[6]["output"] - process_node(task_node) - - gen_node = tg._graph.nodes[7] - orig_output = tg_orig._graph.nodes[7]["output"] - assert "key" in gen_node - assert generator_prefix in gen_node["name"] - process_node(gen_node) - - gen_node = tg._graph.nodes[8] - orig_output = tg_orig._graph.nodes[8]["output"] - assert "key" in gen_node - assert generator_prefix in gen_node["name"] - process_node(gen_node) - - subscript_node = tg._graph.nodes[9] - orig_output = tg_orig._graph.nodes[9]["output"] - assert "key" in subscript_node - assert subscript_prefix in subscript_node["name"] - process_node(subscript_node) - - -def test_process_transport_graph(): - """Test process_transport_graph""" - - ro = get_sample_result_object() - - tg = ro.lattice.transport_graph - tg_new = process_transport_graph(tg) - compare_nodes_and_edges(tg, tg_new) - assert "dirty_nodes" in tg_new.__dict__ - - -def test_process_lattice(): - """Test process_lattice""" - - ro = get_sample_result_object() - ro_orig = get_sample_result_object() - lattice = process_lattice(ro._lattice) - lattice.named_args = lattice.named_args.get_deserialized() - lattice.named_kwargs = lattice.named_kwargs.get_deserialized() - - assert isinstance(lattice.workflow_function, TransportableObject) - assert list(lattice.named_args.keys()) == ["z"] - assert list(lattice.named_kwargs.keys()) == ["zz"] - assert lattice.metadata["executor_data"]["short_name"] == "local" - assert lattice.metadata["workflow_executor"] == "local" - assert lattice.metadata["workflow_executor_data"] == {} - assert lattice.metadata["deps"] == {} - assert lattice.metadata["call_before"] == [] - assert lattice.metadata["call_after"] == [] - - -def test_process_result_object(): - """Test process_result_object""" - - ro 
= get_sample_result_object() - old_inputs = ro._inputs - ro_new = process_result_object(ro) - inputs = ro_new.inputs.get_deserialized() - assert old_inputs["args"] == inputs["args"] - assert old_inputs["kwargs"] == inputs["kwargs"] - assert isinstance(ro_new._result, TransportableObject) - assert "dirty_nodes" in ro_new.lattice.transport_graph.__dict__ - - -def test_migrate_pickled_result_object(mocker): - """Test migrate_pickled_result_object""" - - mock_process_ro = mocker.patch("covalent_dispatcher._cli.migrate.process_result_object") - mock_persist = mocker.patch("covalent_dispatcher._db.update.persist") - - migrate_pickled_result_object(result_pkl) - mock_process_ro.assert_called_once() - mock_persist.assert_called_once() diff --git a/tests/covalent_dispatcher_tests/_cli/sample_results_dir/652dc473-fa37-4846-85f3-b314204fd432/result.pkl b/tests/covalent_dispatcher_tests/_cli/sample_results_dir/652dc473-fa37-4846-85f3-b314204fd432/result.pkl deleted file mode 100644 index 1fe2d20c7..000000000 Binary files a/tests/covalent_dispatcher_tests/_cli/sample_results_dir/652dc473-fa37-4846-85f3-b314204fd432/result.pkl and /dev/null differ diff --git a/tests/covalent_dispatcher_tests/_dal/importers/result_import_test.py b/tests/covalent_dispatcher_tests/_dal/importers/result_import_test.py index 819f88bc6..964e7cbc5 100644 --- a/tests/covalent_dispatcher_tests/_dal/importers/result_import_test.py +++ b/tests/covalent_dispatcher_tests/_dal/importers/result_import_test.py @@ -275,8 +275,8 @@ def test_import_result_with_custom_assets(mocker, test_db): prefix="covalent-" ) as srv_dir: manifest = get_mock_result(dispatch_id, sdk_dir) - manifest.lattice.custom_assets = {"custom_lattice_asset": AssetSchema(size=0)} - manifest.lattice.transport_graph.nodes[0].custom_assets = { + manifest.lattice.assets._custom = {"custom_lattice_asset": AssetSchema(size=0)} + manifest.lattice.transport_graph.nodes[0].assets._custom = { "custom_electron_asset": AssetSchema(size=0) } filtered_res = import_result(manifest, srv_dir, None) diff --git a/tests/covalent_dispatcher_tests/_dal/lattice_test.py b/tests/covalent_dispatcher_tests/_dal/lattice_test.py index 7a55ac23f..f0f2d9a1a 100644 --- a/tests/covalent_dispatcher_tests/_dal/lattice_test.py +++ b/tests/covalent_dispatcher_tests/_dal/lattice_test.py @@ -83,9 +83,6 @@ def test_lattice_attributes(test_db, mocker): workflow_function = lat.get_value("workflow_function").get_deserialized() assert workflow_function(42) == 42 - res.lattice.lattice_imports == lat.get_value("lattice_imports") - res.lattice.cova_imports == lat.get_value("cova_imports") - def test_lattice_restricted_attributes(test_db, mocker): res = get_mock_result() diff --git a/tests/covalent_dispatcher_tests/_db/update_test.py b/tests/covalent_dispatcher_tests/_db/update_test.py index 567c83bc9..6e7dfb4c9 100644 --- a/tests/covalent_dispatcher_tests/_db/update_test.py +++ b/tests/covalent_dispatcher_tests/_db/update_test.py @@ -154,19 +154,6 @@ def test_result_persist_workflow_1(test_db, result_1, mocker): assert executor_data["short_name"] == le.short_name() assert executor_data["attributes"] == le.__dict__ - saved_named_args = local_store.load_file( - storage_path=lattice_storage_path, filename=lattice_row.named_args_filename - ) - - saved_named_kwargs = local_store.load_file( - storage_path=lattice_storage_path, filename=lattice_row.named_kwargs_filename - ) - saved_named_args_raw = saved_named_args.get_deserialized() - saved_named_kwargs_raw = saved_named_kwargs.get_deserialized() - - assert 
saved_named_args_raw == {} - assert saved_named_kwargs_raw == {"a": 1, "b": 2} - # Check that the electron records are as expected assert len(electron_rows) == 6 for electron in electron_rows: diff --git a/tests/covalent_dispatcher_tests/_db/write_result_to_db_test.py b/tests/covalent_dispatcher_tests/_db/write_result_to_db_test.py index 26b114913..310367df7 100644 --- a/tests/covalent_dispatcher_tests/_db/write_result_to_db_test.py +++ b/tests/covalent_dispatcher_tests/_db/write_result_to_db_test.py @@ -59,8 +59,6 @@ WORKFLOW_EXECUTOR_DATA_FILENAME = "workflow_executor_data.pkl" ERROR_FILENAME = "error.txt" INPUTS_FILENAME = "inputs.pkl" -NAMED_ARGS_FILENAME = "named_args.pkl" -NAMED_KWARGS_FILENAME = "named_kwargs.pkl" RESULTS_FILENAME = "results.pkl" VALUE_FILENAME = "value.pkl" STDOUT_FILENAME = "stdout.log" @@ -68,8 +66,6 @@ ERROR_FILENAME = "error.log" TRANSPORT_GRAPH_FILENAME = "transport_graph.pkl" HOOKS_FILENAME = "hooks.pkl" -COVA_IMPORTS_FILENAME = "cova_imports.json" -LATTICE_IMPORTS_FILENAME = "lattice_imports.txt" RESULTS_DIR = "/tmp/results" @@ -126,12 +122,8 @@ def get_lattice_kwargs( workflow_executor_data=json.dumps({}), error_filename=ERROR_FILENAME, inputs_filename=INPUTS_FILENAME, - named_args_filename=NAMED_ARGS_FILENAME, - named_kwargs_filename=NAMED_KWARGS_FILENAME, results_filename=RESULTS_FILENAME, hooks_filename=HOOKS_FILENAME, - cova_imports_filename=COVA_IMPORTS_FILENAME, - lattice_imports_filename=LATTICE_IMPORTS_FILENAME, results_dir=RESULTS_DIR, root_dispatch_id="dispatch_1", created_at=None, @@ -159,12 +151,8 @@ def get_lattice_kwargs( "workflow_executor_data": workflow_executor_data, "error_filename": error_filename, "inputs_filename": inputs_filename, - "named_args_filename": named_args_filename, - "named_kwargs_filename": named_kwargs_filename, "results_filename": results_filename, "hooks_filename": hooks_filename, - "cova_imports_filename": cova_imports_filename, - "lattice_imports_filename": lattice_imports_filename, "results_dir": results_dir, "root_dispatch_id": root_dispatch_id, "created_at": created_at, @@ -257,8 +245,8 @@ def test_insert_lattices_data(test_db, mocker): lattice_args = get_lattice_kwargs( dispatch_id=f"dispatch_{i + 1}", name=f"workflow_{i + 1}", - docstring_filename=f"docstring_{i+1}.txt", - storage_path=f"results/dispatch_{i+1}/", + docstring_filename=f"docstring_{i + 1}.txt", + storage_path=f"results/dispatch_{i + 1}/", executor="dask", workflow_executor="dask", created_at=cur_time, @@ -276,22 +264,18 @@ def test_insert_lattices_data(test_db, mocker): assert lattice.dispatch_id == f"dispatch_{i + 1}" assert lattice.electron_id is None assert lattice.name == f"workflow_{i + 1}" - assert lattice.docstring_filename == f"docstring_{i+1}.txt" + assert lattice.docstring_filename == f"docstring_{i + 1}.txt" assert lattice.status == "RUNNING" assert lattice.storage_type == STORAGE_TYPE - assert lattice.storage_path == f"results/dispatch_{i+1}/" + assert lattice.storage_path == f"results/dispatch_{i + 1}/" assert lattice.function_filename == FUNCTION_FILENAME assert lattice.function_string_filename == FUNCTION_STRING_FILENAME assert lattice.executor == "dask" assert lattice.workflow_executor == "dask" assert lattice.error_filename == ERROR_FILENAME assert lattice.inputs_filename == INPUTS_FILENAME - assert lattice.named_args_filename == NAMED_ARGS_FILENAME - assert lattice.named_kwargs_filename == NAMED_KWARGS_FILENAME assert lattice.results_filename == RESULTS_FILENAME assert lattice.hooks_filename == HOOKS_FILENAME - assert 
lattice.cova_imports_filename == COVA_IMPORTS_FILENAME - assert lattice.lattice_imports_filename == LATTICE_IMPORTS_FILENAME assert lattice.results_dir == RESULTS_DIR assert lattice.root_dispatch_id == f"dispatch_{i + 1}" assert ( diff --git a/tests/covalent_dispatcher_tests/_service/app_test.py b/tests/covalent_dispatcher_tests/_service/app_test.py index 7877fe673..4615e35c5 100644 --- a/tests/covalent_dispatcher_tests/_service/app_test.py +++ b/tests/covalent_dispatcher_tests/_service/app_test.py @@ -206,7 +206,7 @@ def test_register_sublattice(mocker, app, client, mock_manifest): ) mocker.patch("covalent_dispatcher._service.app.cancel_all_with_status") resp = client.post( - "/api/v2/dispatches/parent_dispatch/subdispatches", + "/api/v2/dispatches/parent_dispatch/sublattices", data=mock_manifest.json(), ) diff --git a/tests/covalent_tests/serialize/lattice_serialization_test.py b/tests/covalent_tests/serialize/lattice_serialization_test.py index 4247b6230..709041bde 100644 --- a/tests/covalent_tests/serialize/lattice_serialization_test.py +++ b/tests/covalent_tests/serialize/lattice_serialization_test.py @@ -89,10 +89,10 @@ def workflow(x, y): with tempfile.TemporaryDirectory() as d: manifest = serialize_lattice(workflow, d) - assert ["custom_lat_asset"] == list(manifest.custom_assets.keys()) + assert ["custom_lat_asset"] == list(manifest.assets._custom.keys()) node_0 = manifest.transport_graph.nodes[0] - assert "custom_electron_asset" in node_0.custom_assets + assert "custom_electron_asset" in node_0.assets._custom node_1 = manifest.transport_graph.nodes[1] - assert not node_1.custom_assets + assert not node_1.assets._custom diff --git a/tests/covalent_tests/triggers/database_trigger_test.py b/tests/covalent_tests/triggers/database_trigger_test.py index 734eb2e06..8927417be 100644 --- a/tests/covalent_tests/triggers/database_trigger_test.py +++ b/tests/covalent_tests/triggers/database_trigger_test.py @@ -61,8 +61,8 @@ def test_database_trigger_observe(mocker, where_clauses, database_trigger): database_trigger.where_clauses = where_clauses database_trigger.trigger = mocker.MagicMock() - mock_db_engine = mocker.patch("covalent.triggers.database_trigger.create_engine") - mock_session = mocker.patch("covalent.triggers.database_trigger.Session") + mock_db_engine = mocker.patch("sqlalchemy.create_engine") + mock_session = mocker.patch("sqlalchemy.orm.Session") mock_event = mocker.patch("covalent.triggers.database_trigger.Event") mock_sleep = mocker.patch("covalent.triggers.database_trigger.time.sleep") diff --git a/tests/covalent_tests/workflow/dispatch_source_test.py b/tests/covalent_tests/workflow/dispatch_source_test.py deleted file mode 100644 index 94d588b6b..000000000 --- a/tests/covalent_tests/workflow/dispatch_source_test.py +++ /dev/null @@ -1,94 +0,0 @@ -# Copyright 2021 Agnostiq Inc. -# -# This file is part of Covalent. -# -# Licensed under the Apache License 2.0 (the "License"). A copy of the -# License may be obtained with this software package or at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Use of this file is prohibited except in compliance with the License. -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests for writing the dispatch_source.py file""" - -import pytest - -from covalent._results_manager.result import _filter_cova_decorators - -COVA_IMPORTS = {"covalent", "lattice", "electron", "ct", "cova", "etron"} - - -INPUT1 = "\n".join( - [ - "@covalent.electron(", - ' executor="local"', - ")", - "def identity(x):", - " return x", - "", - "@covalent.electron", - "@covalent.lattice", - "@covalent.electron(", - ' executor="local"', - ")", - "def double(x):", - " return 2*x", - ] -) - -INPUT2 = INPUT1.replace("covalent", "ct") -INPUT3 = INPUT1.replace("covalent", "cova") -INPUT4 = INPUT1.replace("ct.electron", "electron") -INPUT5 = INPUT1.replace("ct.electron", "etron") -INPUT6 = INPUT1.replace("ct.lattice", "lattice") - -OUTPUT1 = "\n".join( - [ - "# @covalent.electron(", - '# executor="local"', - "# )", - "def identity(x):", - " return x", - "", - "# @covalent.electron", - "# @covalent.lattice", - "# @covalent.electron(", - '# executor="local"', - "# )", - "def double(x):", - " return 2*x", - ] -) - -OUTPUT2 = OUTPUT1.replace("covalent", "ct") -OUTPUT3 = OUTPUT1.replace("covalent", "cova") -OUTPUT4 = OUTPUT1.replace("ct.electron", "electron") -OUTPUT5 = OUTPUT1.replace("ct.electron", "etron") -OUTPUT6 = OUTPUT1.replace("ct.lattice", "lattice") - - -@pytest.mark.parametrize( - "input_str, expected_str", - [ - (INPUT1, OUTPUT1), - (INPUT2, OUTPUT2), - (INPUT3, OUTPUT3), - (INPUT4, OUTPUT4), - (INPUT5, OUTPUT5), - (INPUT6, OUTPUT6), - ], -) -def test_filter_cova_decorators( - input_str, - expected_str, -): - """Test the filtering out of Covalent-related decorators.""" - - output_str = _filter_cova_decorators(input_str, COVA_IMPORTS) - - assert output_str == expected_str diff --git a/tests/covalent_tests/workflow/electron_test.py b/tests/covalent_tests/workflow/electron_test.py index 2d5936ed3..327673b6f 100644 --- a/tests/covalent_tests/workflow/electron_test.py +++ b/tests/covalent_tests/workflow/electron_test.py @@ -33,7 +33,6 @@ _build_sublattice_graph, filter_null_metadata, get_serialized_function_str, - to_decoded_electron_collection, ) from covalent._workflow.lattice import Lattice from covalent._workflow.transport import TransportableObject, encode_metadata @@ -238,20 +237,6 @@ def test_wait_for_post_processing_when_returning_waiting_electron(): assert workflow_2.workflow_function.get_deserialized()() == 64 -def test_collection_node_helper_electron(): - """Unit test for `to_decoded_electron_collection`""" - - list_collection = [ - TransportableObject.make_transportable(1), - TransportableObject.make_transportable(2), - ] - - dict_collection = {"a": list_collection[0], "b": list_collection[1]} - assert to_decoded_electron_collection(x=list_collection) == [1, 2] - - assert to_decoded_electron_collection(x=dict_collection) == {"a": 1, "b": 2} - - def test_injected_inputs_are_not_in_tg(): """Test that arguments to electrons injected by calldeps aren't added to the transport graph""" diff --git a/tests/covalent_tests/workflow/lattice_serialization_test.py b/tests/covalent_tests/workflow/lattice_serialization_test.py index 72d962d0d..4be41091a 100644 --- a/tests/covalent_tests/workflow/lattice_serialization_test.py +++ b/tests/covalent_tests/workflow/lattice_serialization_test.py @@ -55,7 +55,6 @@ def workflow(x): return f(x) workflow.build_graph(5) - workflow.cova_imports = ["dummy_module"] json_workflow = workflow.serialize_to_json() diff --git a/tests/covalent_ui_backend_tests/functional_tests/webhook_test.py b/tests/covalent_ui_backend_tests/functional_tests/webhook_test.py 
index 25b696f35..50e34e23d 100644
--- a/tests/covalent_ui_backend_tests/functional_tests/webhook_test.py
+++ b/tests/covalent_ui_backend_tests/functional_tests/webhook_test.py
@@ -70,10 +70,9 @@ async def test_send_update():
     assert response is None


-@pytest.mark.skip(reason="Test is breaking, need to fix see PR #1728")
 def test_send_draw_request():
     """Test draw request"""
-    workflow = get_mock_simple_workflow()
-    lattice = Lattice.deserialize_from_json(workflow.serialize_to_json())
+    lattice = get_mock_simple_workflow()
+    lattice.build_graph(3)
     response = send_draw_request(lattice)
     assert response is None
diff --git a/tests/stress_tests/scripts/mnist_sublattices.py b/tests/stress_tests/scripts/mnist_sublattices.py
index 31ad46fc8..2b0e0aab0 100644
--- a/tests/stress_tests/scripts/mnist_sublattices.py
+++ b/tests/stress_tests/scripts/mnist_sublattices.py
@@ -146,7 +146,7 @@ def test(
         correct += (pred.argmax(1) == y).type(torch.float).sum().item()
     test_loss /= num_batches
     correct /= size
-    print(f"Test Error: \n Accuracy: {(100*correct):>0.1f}%, Avg loss: {test_loss:>8f} \n")
+    print(f"Test Error: \n Accuracy: {(100 * correct):>0.1f}%, Avg loss: {test_loss:>8f} \n")


 def train_model(
diff --git a/tests/stress_tests/scripts/sublattices_mixed.py b/tests/stress_tests/scripts/sublattices_mixed.py
index 4dc085f0f..d5984b55c 100644
--- a/tests/stress_tests/scripts/sublattices_mixed.py
+++ b/tests/stress_tests/scripts/sublattices_mixed.py
@@ -147,7 +147,7 @@ def test(
         correct += (pred.argmax(1) == y).type(torch.float).sum().item()
     test_loss /= num_batches
     correct /= size
-    print(f"Test Error: \n Accuracy: {(100*correct):>0.1f}%, Avg loss: {test_loss:>8f} \n")
+    print(f"Test Error: \n Accuracy: {(100 * correct):>0.1f}%, Avg loss: {test_loss:>8f} \n")


 def train_model(
diff --git a/tests/stress_tests/scripts/tasks.py b/tests/stress_tests/scripts/tasks.py
index 55d9ab8c9..181c4c33a 100644
--- a/tests/stress_tests/scripts/tasks.py
+++ b/tests/stress_tests/scripts/tasks.py
@@ -175,7 +175,7 @@ def test(
         correct += (pred.argmax(1) == y).type(torch.float).sum().item()
     test_loss /= num_batches
     correct /= size
-    print(f"Test Error: \n Accuracy: {(100*correct):>0.1f}%, Avg loss: {test_loss:>8f} \n")
+    print(f"Test Error: \n Accuracy: {(100 * correct):>0.1f}%, Avg loss: {test_loss:>8f} \n")


 def train_model(
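The re-enabled draw-request test above leans on `Lattice.build_graph` tracing the workflow locally and populating `transport_graph` without dispatching anything. A toy sketch of that pattern, using a made-up electron and lattice rather than the test's own `get_mock_simple_workflow`:

```python
import covalent as ct

@ct.electron
def square(x):
    return x * x

@ct.lattice
def workflow(x):
    return square(x)

# Traces the lattice with concrete arguments and fills in
# workflow.transport_graph client-side; nothing is dispatched.
workflow.build_graph(3)
assert len(workflow.transport_graph._graph.nodes) > 0
```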
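Finally, the `setup.py` hunk earlier in this diff (`COVALENT_COMMIT_SHA`) is easiest to read in isolation: when the variable is set, the sdist's `download_url` points at that commit's archive instead of the release tag. A small sketch of the selection logic, where `abc1234` is a hypothetical sha exported before running pip/setup:

```python
# Reproduces the artifact-id selection from setup.py; "abc1234" is a
# hypothetical value, e.g. `COVALENT_COMMIT_SHA=abc1234 pip install .`
import os

version = "0.235.1-rc.0"  # contents of the VERSION file at this release
commit_sha = os.getenv("COVALENT_COMMIT_SHA")
artifact_id = commit_sha if commit_sha else f"v{version}"

print(f"https://github.com/AgnostiqHQ/covalent/archive/{artifact_id}.tar.gz")
# unset -> .../archive/v0.235.1-rc.0.tar.gz
# set   -> .../archive/abc1234.tar.gz
```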