diff --git a/diracx-cli/src/diracx/cli/jobs.py b/diracx-cli/src/diracx/cli/jobs.py index 21f33b28..ceeed695 100644 --- a/diracx-cli/src/diracx/cli/jobs.py +++ b/diracx-cli/src/diracx/cli/jobs.py @@ -4,7 +4,8 @@ __all__ = ("app",) import json -from typing import Annotated +import re +from typing import Annotated, cast from rich.console import Console from rich.table import Table @@ -52,29 +53,66 @@ async def search( ], condition: Annotated[list[SearchSpec], Option(parser=parse_condition)] = [], all: bool = False, + page: int = 1, + per_page: int = 10, ): async with DiracClient() as api: - jobs = await api.jobs.search( + jobs, content_range = await api.jobs.search( parameters=None if all else parameter, search=condition if condition else None, + page=page, + per_page=per_page, + cls=lambda _, jobs, headers: ( + jobs, + ContentRange(headers.get("Content-Range", "jobs")), + ), ) - display(jobs, "jobs") - -def display(data, unit: str): + display(jobs, cast(ContentRange, content_range)) + + +class ContentRange: + unit: str | None = None + start: int | None = None + end: int | None = None + total: int | None = None + + def __init__(self, header: str): + if match := re.fullmatch(r"(\w+) (\d+-\d+|\*)/(\d+|\*)", header): + self.unit, range, total = match.groups() + self.total = int(total) + if range != "*": + self.start, self.end = map(int, range.split("-")) + elif match := re.fullmatch(r"\w+", header): + self.unit = match.group() + + @property + def caption(self): + if self.start is None and self.end is None: + range_str = "all" + else: + range_str = ( + f"{self.start if self.start is not None else 'unknown'}-" + f"{self.end if self.end is not None else 'unknown'} " + f"of {self.total or 'unknown'}" + ) + return f"Showing {range_str} {self.unit}" + + +def display(data, content_range: ContentRange): output_format = get_diracx_preferences().output_format match output_format: case OutputFormats.JSON: print(json.dumps(data, indent=2)) case OutputFormats.RICH: - 
display_rich(data, unit) + display_rich(data, content_range) case _: raise NotImplementedError(output_format) -def display_rich(data, unit: str) -> None: +def display_rich(data, content_range: ContentRange) -> None: if not data: - print(f"No {unit} found") + print(f"No {content_range.unit} found") return console = Console() @@ -83,7 +121,7 @@ def display_rich(data, unit: str) -> None: table = Table( "Parameter", "Value", - caption=f"Showing {len(data)} of {len(data)} {unit}", + caption=content_range.caption, caption_justify="right", ) for job in data: @@ -93,7 +131,7 @@ def display_rich(data, unit: str) -> None: else: table = Table( *columns, - caption=f"Showing {len(data)} of {len(data)} {unit}", + caption=content_range.caption, caption_justify="right", ) for job in data: diff --git a/diracx-cli/tests/test_jobs.py b/diracx-cli/tests/test_jobs.py index 3875fa7f..cda962f2 100644 --- a/diracx-cli/tests/test_jobs.py +++ b/diracx-cli/tests/test_jobs.py @@ -1,13 +1,122 @@ from __future__ import annotations import json +import os +import tempfile + +import pytest +from pytest import raises from diracx import cli +from diracx.core.models import ScalarSearchSpec +from diracx.core.preferences import get_diracx_preferences +TEST_JDL = """ + Arguments = "jobDescription.xml -o LogLevel=INFO"; + Executable = "dirac-jobexec"; + JobGroup = jobGroup; + JobName = jobName; + JobType = User; + LogLevel = INFO; + OutputSandbox = + { + Script1_CodeOutput.log, + std.err, + std.out + }; + Priority = 1; + Site = ANY; + StdError = std.err; + StdOutput = std.out; +""" + + +@pytest.fixture +async def jdl_file(): + with tempfile.NamedTemporaryFile(mode="w", encoding="utf-8") as temp_file: + temp_file.write(TEST_JDL) + temp_file.flush() + yield temp_file.name + + +async def test_submit(with_cli_login, jdl_file, capfd): + """Test submitting a job using a JDL file.""" + + with open(jdl_file, "r") as temp_file: + await cli.jobs.submit([temp_file]) -async def test_search(with_cli_login, capfd): - 
async def test_search(with_cli_login, jdl_file, capfd):
    """Test searching for jobs, in both JSON and RICH output formats."""

    # Submit 20 jobs
    with open(jdl_file, "r") as temp_file:
        await cli.jobs.submit([temp_file] * 20)

    cap = capfd.readouterr()

    # By default the output should be in JSON format as capfd is not a TTY
    await cli.jobs.search()
    cap = capfd.readouterr()
    assert cap.err == ""
    jobs = json.loads(cap.out)

    # There should be 10 jobs by default (the CLI's default per_page)
    assert len(jobs) == 10
    assert "JobID" in jobs[0]
    assert "JobGroup" in jobs[0]

    # Change per-page to a very large number to get all the jobs at once: the caption should change
    await cli.jobs.search(per_page=9999)
    cap = capfd.readouterr()
    assert cap.err == ""
    jobs = json.loads(cap.out)

    # There should be 20 jobs at least now
    assert len(jobs) >= 20
    assert "JobID" in cap.out
    assert "JobGroup" in cap.out

    # Search for a job that doesn't exist
    condition = ScalarSearchSpec(parameter="Status", operator="eq", value="nonexistent")
    await cli.jobs.search(condition=[condition])
    cap = capfd.readouterr()
    assert cap.err == ""
    assert "[]" == cap.out.strip()

    # Switch to RICH output.
    # BUG FIX: the original mutated DIRACX_OUTPUT_FORMAT and cleared the
    # preferences cache without ever restoring them, leaking RICH mode into
    # every test that runs after this one in the same session.
    previous_format = os.environ.get("DIRACX_OUTPUT_FORMAT")
    os.environ["DIRACX_OUTPUT_FORMAT"] = "RICH"
    get_diracx_preferences.cache_clear()
    try:
        await cli.jobs.search()
        cap = capfd.readouterr()
        assert cap.err == ""

        # RICH output is a table, not valid JSON
        with raises(json.JSONDecodeError):
            json.loads(cap.out)

        assert "JobID" in cap.out
        assert "JobGroup" in cap.out
        assert "Showing 0-9 of " in cap.out

        # Change per-page to a very large number to get all the jobs at once: the caption should change
        await cli.jobs.search(per_page=9999)
        cap = capfd.readouterr()
        assert cap.err == ""

        with raises(json.JSONDecodeError):
            json.loads(cap.out)

        assert "JobID" in cap.out
        assert "JobGroup" in cap.out
        assert "Showing all jobs" in cap.out

        # Search for a job that doesn't exist
        await cli.jobs.search(condition=[condition])
        cap = capfd.readouterr()
        assert cap.err == ""
        assert "No jobs found" in cap.out
    finally:
        # Restore the environment and preferences cache for subsequent tests.
        if previous_format is None:
            del os.environ["DIRACX_OUTPUT_FORMAT"]
        else:
            os.environ["DIRACX_OUTPUT_FORMAT"] = previous_format
        get_diracx_preferences.cache_clear()
# -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/_configuration.py b/diracx-client/src/diracx/client/_configuration.py index 8d6c22c4..dec717d6 100644 --- a/diracx-client/src/diracx/client/_configuration.py +++ b/diracx-client/src/diracx/client/_configuration.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.17) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/_vendor.py b/diracx-client/src/diracx/client/_vendor.py index 09887722..3bf48338 100644 --- a/diracx-client/src/diracx/client/_vendor.py +++ b/diracx-client/src/diracx/client/_vendor.py @@ -1,5 +1,5 @@ # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.17) # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/aio/__init__.py b/diracx-client/src/diracx/client/aio/__init__.py index 6c158e3c..97d268e5 100644 --- a/diracx-client/src/diracx/client/aio/__init__.py +++ b/diracx-client/src/diracx/client/aio/__init__.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.17) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/aio/_client.py b/diracx-client/src/diracx/client/aio/_client.py index 79235982..1e55fa14 100644 --- a/diracx-client/src/diracx/client/aio/_client.py +++ b/diracx-client/src/diracx/client/aio/_client.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.17) # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/aio/_configuration.py b/diracx-client/src/diracx/client/aio/_configuration.py index c8ab98a6..de9cabe2 100644 --- a/diracx-client/src/diracx/client/aio/_configuration.py +++ b/diracx-client/src/diracx/client/aio/_configuration.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.17) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/aio/_vendor.py b/diracx-client/src/diracx/client/aio/_vendor.py index 09887722..3bf48338 100644 --- a/diracx-client/src/diracx/client/aio/_vendor.py +++ b/diracx-client/src/diracx/client/aio/_vendor.py @@ -1,5 +1,5 @@ # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.17) # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/aio/operations/__init__.py b/diracx-client/src/diracx/client/aio/operations/__init__.py index 9ad57998..f96f665d 100644 --- a/diracx-client/src/diracx/client/aio/operations/__init__.py +++ b/diracx-client/src/diracx/client/aio/operations/__init__.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.17) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/aio/operations/_operations.py b/diracx-client/src/diracx/client/aio/operations/_operations.py index 26ccb985..9aea2d00 100644 --- a/diracx-client/src/diracx/client/aio/operations/_operations.py +++ b/diracx-client/src/diracx/client/aio/operations/_operations.py @@ -1,7 +1,7 @@ # pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.17) # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- from io import IOBase @@ -2072,7 +2072,7 @@ async def search( self, body: Optional[_models.JobSearchParams] = None, *, - page: int = 0, + page: int = 1, per_page: int = 100, content_type: str = "application/json", **kwargs: Any, @@ -2085,7 +2085,7 @@ async def search( :param body: Default value is None. :type body: ~client.models.JobSearchParams - :keyword page: Default value is 0. + :keyword page: Default value is 1. :paramtype page: int :keyword per_page: Default value is 100. :paramtype per_page: int @@ -2102,7 +2102,7 @@ async def search( self, body: Optional[IO[bytes]] = None, *, - page: int = 0, + page: int = 1, per_page: int = 100, content_type: str = "application/json", **kwargs: Any, @@ -2115,7 +2115,7 @@ async def search( :param body: Default value is None. :type body: IO[bytes] - :keyword page: Default value is 0. + :keyword page: Default value is 1. :paramtype page: int :keyword per_page: Default value is 100. :paramtype per_page: int @@ -2132,7 +2132,7 @@ async def search( self, body: Optional[Union[_models.JobSearchParams, IO[bytes]]] = None, *, - page: int = 0, + page: int = 1, per_page: int = 100, **kwargs: Any, ) -> List[JSON]: @@ -2144,7 +2144,7 @@ async def search( :param body: Is either a JobSearchParams type or a IO[bytes] type. Default value is None. :type body: ~client.models.JobSearchParams or IO[bytes] - :keyword page: Default value is 0. + :keyword page: Default value is 1. :paramtype page: int :keyword per_page: Default value is 100. 
:paramtype per_page: int @@ -2199,7 +2199,7 @@ async def search( response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 206]: if _stream: await response.read() # Load the body in memory and close the socket map_error( @@ -2207,10 +2207,19 @@ async def search( ) raise HttpResponseError(response=response) - deserialized = self._deserialize("[object]", pipeline_response) + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("[object]", pipeline_response) + + if response.status_code == 206: + response_headers["Content-Range"] = self._deserialize( + "str", response.headers.get("Content-Range") + ) + + deserialized = self._deserialize("[object]", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore diff --git a/diracx-client/src/diracx/client/models/__init__.py b/diracx-client/src/diracx/client/models/__init__.py index 336cfe9d..f5323580 100644 --- a/diracx-client/src/diracx/client/models/__init__.py +++ b/diracx-client/src/diracx/client/models/__init__.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.17) # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- @@ -25,7 +25,6 @@ from ._models import ScalarSearchSpecValue from ._models import SetJobStatusReturn from ._models import SortSpec -from ._models import SortSpecDirection from ._models import SupportInfo from ._models import TokenResponse from ._models import UserInfoResponse @@ -38,8 +37,6 @@ from ._enums import ChecksumAlgorithm from ._enums import Enum0 from ._enums import Enum1 -from ._enums import Enum11 -from ._enums import Enum12 from ._enums import Enum2 from ._enums import Enum3 from ._enums import Enum4 @@ -47,6 +44,7 @@ from ._enums import SandboxFormat from ._enums import SandboxType from ._enums import ScalarSearchOperator +from ._enums import SortDirection from ._enums import VectorSearchOperator from ._patch import __all__ as _patch_all from ._patch import * # pylint: disable=unused-wildcard-import @@ -74,7 +72,6 @@ "ScalarSearchSpecValue", "SetJobStatusReturn", "SortSpec", - "SortSpecDirection", "SupportInfo", "TokenResponse", "UserInfoResponse", @@ -86,8 +83,6 @@ "ChecksumAlgorithm", "Enum0", "Enum1", - "Enum11", - "Enum12", "Enum2", "Enum3", "Enum4", @@ -95,6 +90,7 @@ "SandboxFormat", "SandboxType", "ScalarSearchOperator", + "SortDirection", "VectorSearchOperator", ] __all__.extend([p for p in _patch_all if p not in __all__]) diff --git a/diracx-client/src/diracx/client/models/_enums.py b/diracx-client/src/diracx/client/models/_enums.py index d448e10b..63dde62d 100644 --- a/diracx-client/src/diracx/client/models/_enums.py +++ b/diracx-client/src/diracx/client/models/_enums.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.17) # Changes may cause incorrect behavior and will be lost if the 
code is regenerated. # -------------------------------------------------------------------------- @@ -26,18 +26,6 @@ class Enum1(str, Enum, metaclass=CaseInsensitiveEnumMeta): S256 = "S256" -class Enum11(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Enum11.""" - - ASC = "asc" - - -class Enum12(str, Enum, metaclass=CaseInsensitiveEnumMeta): - """Enum12.""" - - DSC = "dsc" - - class Enum2(str, Enum, metaclass=CaseInsensitiveEnumMeta): """Enum2.""" @@ -101,6 +89,13 @@ class ScalarSearchOperator(str, Enum, metaclass=CaseInsensitiveEnumMeta): LIKE = "like" +class SortDirection(str, Enum, metaclass=CaseInsensitiveEnumMeta): + """An enumeration.""" + + ASC = "asc" + DESC = "desc" + + class VectorSearchOperator(str, Enum, metaclass=CaseInsensitiveEnumMeta): """An enumeration.""" diff --git a/diracx-client/src/diracx/client/models/_models.py b/diracx-client/src/diracx/client/models/_models.py index 3b470cee..cfc0edb4 100644 --- a/diracx-client/src/diracx/client/models/_models.py +++ b/diracx-client/src/diracx/client/models/_models.py @@ -1,7 +1,7 @@ # coding=utf-8 # pylint: disable=too-many-lines # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.17) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- @@ -816,8 +816,8 @@ class SortSpec(_serialization.Model): :ivar parameter: Parameter. Required. :vartype parameter: str - :ivar direction: Direction. Required. - :vartype direction: ~client.models.SortSpecDirection + :ivar direction: An enumeration. Required. Known values are: "asc" and "desc". 
+ :vartype direction: str or ~client.models.SortDirection """ _validation = { @@ -827,27 +827,27 @@ class SortSpec(_serialization.Model): _attribute_map = { "parameter": {"key": "parameter", "type": "str"}, - "direction": {"key": "direction", "type": "SortSpecDirection"}, + "direction": {"key": "direction", "type": "str"}, } def __init__( - self, *, parameter: str, direction: "_models.SortSpecDirection", **kwargs: Any + self, + *, + parameter: str, + direction: Union[str, "_models.SortDirection"], + **kwargs: Any, ) -> None: """ :keyword parameter: Parameter. Required. :paramtype parameter: str - :keyword direction: Direction. Required. - :paramtype direction: ~client.models.SortSpecDirection + :keyword direction: An enumeration. Required. Known values are: "asc" and "desc". + :paramtype direction: str or ~client.models.SortDirection """ super().__init__(**kwargs) self.parameter = parameter self.direction = direction -class SortSpecDirection(_serialization.Model): - """Direction.""" - - class SupportInfo(_serialization.Model): """SupportInfo. diff --git a/diracx-client/src/diracx/client/operations/__init__.py b/diracx-client/src/diracx/client/operations/__init__.py index 9ad57998..f96f665d 100644 --- a/diracx-client/src/diracx/client/operations/__init__.py +++ b/diracx-client/src/diracx/client/operations/__init__.py @@ -1,6 +1,6 @@ # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.17) # Changes may cause incorrect behavior and will be lost if the code is regenerated. 
# -------------------------------------------------------------------------- diff --git a/diracx-client/src/diracx/client/operations/_operations.py b/diracx-client/src/diracx/client/operations/_operations.py index 7d1984eb..048d85a2 100644 --- a/diracx-client/src/diracx/client/operations/_operations.py +++ b/diracx-client/src/diracx/client/operations/_operations.py @@ -1,7 +1,7 @@ # pylint: disable=too-many-lines,too-many-statements # coding=utf-8 # -------------------------------------------------------------------------- -# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.9) +# Code generated by Microsoft (R) AutoRest Code Generator (autorest: 3.10.2, generator: @autorest/python@6.13.17) # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from io import IOBase @@ -672,7 +672,7 @@ def build_jobs_reschedule_single_job_request(job_id: int, **kwargs: Any) -> Http def build_jobs_search_request( - *, page: int = 0, per_page: int = 100, **kwargs: Any + *, page: int = 1, per_page: int = 100, **kwargs: Any ) -> HttpRequest: _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {}) _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) @@ -2894,7 +2894,7 @@ def search( self, body: Optional[_models.JobSearchParams] = None, *, - page: int = 0, + page: int = 1, per_page: int = 100, content_type: str = "application/json", **kwargs: Any, @@ -2907,7 +2907,7 @@ def search( :param body: Default value is None. :type body: ~client.models.JobSearchParams - :keyword page: Default value is 0. + :keyword page: Default value is 1. :paramtype page: int :keyword per_page: Default value is 100. 
:paramtype per_page: int @@ -2924,7 +2924,7 @@ def search( self, body: Optional[IO[bytes]] = None, *, - page: int = 0, + page: int = 1, per_page: int = 100, content_type: str = "application/json", **kwargs: Any, @@ -2937,7 +2937,7 @@ def search( :param body: Default value is None. :type body: IO[bytes] - :keyword page: Default value is 0. + :keyword page: Default value is 1. :paramtype page: int :keyword per_page: Default value is 100. :paramtype per_page: int @@ -2954,7 +2954,7 @@ def search( self, body: Optional[Union[_models.JobSearchParams, IO[bytes]]] = None, *, - page: int = 0, + page: int = 1, per_page: int = 100, **kwargs: Any, ) -> List[JSON]: @@ -2966,7 +2966,7 @@ def search( :param body: Is either a JobSearchParams type or a IO[bytes] type. Default value is None. :type body: ~client.models.JobSearchParams or IO[bytes] - :keyword page: Default value is 0. + :keyword page: Default value is 1. :paramtype page: int :keyword per_page: Default value is 100. :paramtype per_page: int @@ -3021,7 +3021,7 @@ def search( response = pipeline_response.http_response - if response.status_code not in [200]: + if response.status_code not in [200, 206]: if _stream: response.read() # Load the body in memory and close the socket map_error( @@ -3029,10 +3029,19 @@ def search( ) raise HttpResponseError(response=response) - deserialized = self._deserialize("[object]", pipeline_response) + response_headers = {} + if response.status_code == 200: + deserialized = self._deserialize("[object]", pipeline_response) + + if response.status_code == 206: + response_headers["Content-Range"] = self._deserialize( + "str", response.headers.get("Content-Range") + ) + + deserialized = self._deserialize("[object]", pipeline_response) if cls: - return cls(pipeline_response, deserialized, {}) # type: ignore + return cls(pipeline_response, deserialized, response_headers) # type: ignore return deserialized # type: ignore diff --git a/diracx-core/src/diracx/core/models.py 
b/diracx-core/src/diracx/core/models.py index 243068bc..916cc7f5 100644 --- a/diracx-core/src/diracx/core/models.py +++ b/diracx-core/src/diracx/core/models.py @@ -2,7 +2,7 @@ from datetime import datetime from enum import StrEnum -from typing import Literal, TypedDict +from typing import TypedDict from pydantic import BaseModel, Field @@ -20,12 +20,6 @@ class VectorSearchOperator(StrEnum): NOT_IN = "not in" -# TODO: TypedDict vs pydantic? -class SortSpec(TypedDict): - parameter: str - direction: Literal["asc"] | Literal["dsc"] - - class ScalarSearchSpec(TypedDict): parameter: str operator: ScalarSearchOperator @@ -41,6 +35,17 @@ class VectorSearchSpec(TypedDict): SearchSpec = ScalarSearchSpec | VectorSearchSpec +class SortDirection(StrEnum): + ASC = "asc" + DESC = "desc" + + +# TODO: TypedDict vs pydantic? +class SortSpec(TypedDict): + parameter: str + direction: SortDirection + + class TokenResponse(BaseModel): # Based on RFC 6749 access_token: str diff --git a/diracx-db/src/diracx/db/sql/jobs/db.py b/diracx-db/src/diracx/db/sql/jobs/db.py index f932f7d1..b7a0b2ba 100644 --- a/diracx-db/src/diracx/db/sql/jobs/db.py +++ b/diracx-db/src/diracx/db/sql/jobs/db.py @@ -19,6 +19,9 @@ LimitedJobStatusReturn, ScalarSearchOperator, ScalarSearchSpec, + SearchSpec, + SortDirection, + SortSpec, ) from diracx.core.properties import JOB_SHARING, SecurityProperty @@ -83,14 +86,14 @@ async def summary(self, group_by, search) -> list[dict[str, str | int]]: async def search( self, - parameters, - search, - sorts, + parameters: list[str] | None, + search: list[SearchSpec], + sorts: list[SortSpec], *, distinct: bool = False, per_page: int = 100, page: int | None = None, - ) -> list[dict[str, Any]]: + ) -> tuple[int, list[dict[Any, Any]]]: # Find which columns to select columns = _get_columns(Jobs.__table__, parameters) stmt = select(*columns) @@ -98,28 +101,45 @@ async def search( stmt = apply_search_filters(Jobs.__table__, stmt, search) # Apply any sort constraints + sort_columns = 
[] for sort in sorts: if sort["parameter"] not in Jobs.__table__.columns: raise InvalidQueryError( f"Cannot sort by {sort['parameter']}: unknown column" ) column = Jobs.__table__.columns[sort["parameter"]] - if sort["direction"] == "asc": - column = column.asc() - elif sort["direction"] == "desc": - column = column.desc() + sorted_column = None + if sort["direction"] == SortDirection.ASC: + sorted_column = column.asc() + elif sort["direction"] == SortDirection.DESC: + sorted_column = column.desc() else: raise InvalidQueryError(f"Unknown sort {sort['direction']=}") + sort_columns.append(sorted_column) + + if sort_columns: + stmt = stmt.order_by(*sort_columns) if distinct: stmt = stmt.distinct() + # Calculate total count before applying pagination + total_count_subquery = stmt.alias() + total_count_stmt = select(func.count()).select_from(total_count_subquery) + total = (await self.conn.execute(total_count_stmt)).scalar_one() + # Apply pagination - if page: - raise NotImplementedError("TODO Not yet implemented") + if page is not None: + if page < 1: + raise InvalidQueryError("Page must be a positive integer") + if per_page < 1: + raise InvalidQueryError("Per page must be a positive integer") + stmt = stmt.offset((page - 1) * per_page).limit(per_page) # Execute the query - return [dict(row._mapping) async for row in (await self.conn.stream(stmt))] + return total, [ + dict(row._mapping) async for row in (await self.conn.stream(stmt)) + ] async def _insertNewJDL(self, jdl) -> int: from DIRAC.WorkloadManagementSystem.DB.JobDBUtils import compressJDL @@ -314,7 +334,7 @@ async def rescheduleJob(self, job_id) -> dict[str, Any]: from DIRAC.Core.Utilities.ClassAd.ClassAdLight import ClassAd from DIRAC.Core.Utilities.ReturnValues import SErrorException - result = await self.search( + _, result = await self.search( parameters=[ "Status", "MinorStatus", diff --git a/diracx-db/src/diracx/db/sql/jobs/status_utility.py b/diracx-db/src/diracx/db/sql/jobs/status_utility.py index 
0451cc6b..106a1e42 100644 --- a/diracx-db/src/diracx/db/sql/jobs/status_utility.py +++ b/diracx-db/src/diracx/db/sql/jobs/status_utility.py @@ -43,7 +43,7 @@ async def set_job_status( for key, value in status.items(): statusDict[key] = {k: v for k, v in value.dict().items() if v is not None} - res = await job_db.search( + _, res = await job_db.search( parameters=["Status", "StartExecTime", "EndExecTime"], search=[ { diff --git a/diracx-db/tests/jobs/test_jobDB.py b/diracx-db/tests/jobs/test_jobDB.py index 5e46352b..f6a5ed55 100644 --- a/diracx-db/tests/jobs/test_jobDB.py +++ b/diracx-db/tests/jobs/test_jobDB.py @@ -4,7 +4,15 @@ import pytest -from diracx.core.exceptions import JobNotFound +from diracx.core.exceptions import InvalidQueryError, JobNotFound +from diracx.core.models import ( + ScalarSearchOperator, + ScalarSearchSpec, + SortDirection, + SortSpec, + VectorSearchOperator, + VectorSearchSpec, +) from diracx.db.sql.jobs.db import JobDB @@ -20,9 +28,11 @@ async def job_db(tmp_path): yield job_db -async def test_some_asyncio_code(job_db): +async def test_search_parameters(job_db): + """Test that we can search specific parameters for jobs in the database.""" async with job_db as job_db: - result = await job_db.search(["JobID"], [], []) + total, result = await job_db.search(["JobID"], [], []) + assert total == 0 assert not result result = await asyncio.gather( @@ -40,11 +50,265 @@ async def test_some_asyncio_code(job_db): ) async with job_db as job_db: - result = await job_db.search(["JobID"], [], []) + # Search a specific parameter: JobID + total, result = await job_db.search(["JobID"], [], []) + assert total == 100 assert result + for r in result: + assert r.keys() == {"JobID"} + + # Search a specific parameter: Status + total, result = await job_db.search(["Status"], [], []) + assert total == 100 + assert result + for r in result: + assert r.keys() == {"Status"} + + # Search for multiple parameters: JobID, Status + total, result = await 
job_db.search(["JobID", "Status"], [], []) + assert total == 100 + assert result + for r in result: + assert r.keys() == {"JobID", "Status"} + + # Search for a specific parameter but use distinct: Status + total, result = await job_db.search(["Status"], [], [], distinct=True) + assert total == 1 + assert result + + # Search for a non-existent parameter: Dummy + with pytest.raises(InvalidQueryError): + total, result = await job_db.search(["Dummy"], [], []) + + +async def test_search_conditions(job_db): + """Test that we can search for specific jobs in the database.""" + async with job_db as job_db: + result = await asyncio.gather( + *( + job_db.insert( + f"JDL{i}", + f"owner{i}", + "owner_group", + "New", + "dfdfds", + "lhcb", + ) + for i in range(100) + ) + ) + + async with job_db as job_db: + # Search a specific scalar condition: JobID eq 3 + condition = ScalarSearchSpec( + parameter="JobID", operator=ScalarSearchOperator.EQUAL, value=3 + ) + total, result = await job_db.search([], [condition], []) + assert total == 1 + assert result + assert len(result) == 1 + assert result[0]["JobID"] == 3 + + # Search a specific scalar condition: JobID lt 3 + condition = ScalarSearchSpec( + parameter="JobID", operator=ScalarSearchOperator.LESS_THAN, value=3 + ) + total, result = await job_db.search([], [condition], []) + assert total == 2 + assert result + assert len(result) == 2 + assert result[0]["JobID"] == 1 + assert result[1]["JobID"] == 2 + + # Search a specific scalar condition: JobID neq 3 + condition = ScalarSearchSpec( + parameter="JobID", operator=ScalarSearchOperator.NOT_EQUAL, value=3 + ) + total, result = await job_db.search([], [condition], []) + assert total == 99 + assert result + assert len(result) == 99 + assert all(r["JobID"] != 3 for r in result) + + # Search a specific scalar condition: JobID eq 5873 (does not exist) + condition = ScalarSearchSpec( + parameter="JobID", operator=ScalarSearchOperator.EQUAL, value=5873 + ) + total, result = await 
job_db.search([], [condition], []) + assert not result + + # Search a specific vector condition: JobID in 1,2,3 + condition = VectorSearchSpec( + parameter="JobID", operator=VectorSearchOperator.IN, values=[1, 2, 3] + ) + total, result = await job_db.search([], [condition], []) + assert total == 3 + assert result + assert len(result) == 3 + assert all(r["JobID"] in [1, 2, 3] for r in result) + + # Search a specific vector condition: JobID in 1,2,5873 (one of them does not exist) + condition = VectorSearchSpec( + parameter="JobID", operator=VectorSearchOperator.IN, values=[1, 2, 5873] + ) + total, result = await job_db.search([], [condition], []) + assert total == 2 + assert result + assert len(result) == 2 + assert all(r["JobID"] in [1, 2] for r in result) + + # Search for multiple conditions based on different parameters: JobID eq 70, JobID in 4,5,6 + condition1 = ScalarSearchSpec( + parameter="Owner", operator=ScalarSearchOperator.EQUAL, value="owner4" + ) + condition2 = VectorSearchSpec( + parameter="JobID", operator=VectorSearchOperator.IN, values=[4, 5, 6] + ) + total, result = await job_db.search([], [condition1, condition2], []) + assert total == 1 + assert result + assert len(result) == 1 + assert result[0]["JobID"] == 5 + assert result[0]["Owner"] == "owner4" + + # Search for multiple conditions based on the same parameter: JobID eq 70, JobID in 4,5,6 + condition1 = ScalarSearchSpec( + parameter="JobID", operator=ScalarSearchOperator.EQUAL, value=70 + ) + condition2 = VectorSearchSpec( + parameter="JobID", operator=VectorSearchOperator.IN, values=[4, 5, 6] + ) + total, result = await job_db.search([], [condition1, condition2], []) + assert total == 0 + assert not result + + +async def test_search_sorts(job_db): + """Test that we can search for jobs in the database and sort the results.""" + async with job_db as job_db: + result = await asyncio.gather( + *( + job_db.insert( + f"JDL{i}", + f"owner{i}", + "owner_group1" if i < 50 else "owner_group2", + "New", 
+ "dfdfds", + "lhcb", + ) + for i in range(100) + ) + ) + + async with job_db as job_db: + # Search and sort by JobID in ascending order + sort = SortSpec(parameter="JobID", direction=SortDirection.ASC) + total, result = await job_db.search([], [], [sort]) + assert total == 100 + assert result + for i, r in enumerate(result): + assert r["JobID"] == i + 1 + + # Search and sort by JobID in descending order + sort = SortSpec(parameter="JobID", direction=SortDirection.DESC) + total, result = await job_db.search([], [], [sort]) + assert total == 100 + assert result + for i, r in enumerate(result): + assert r["JobID"] == 100 - i + + # Search and sort by Owner in ascending order + sort = SortSpec(parameter="Owner", direction=SortDirection.ASC) + total, result = await job_db.search([], [], [sort]) + assert total == 100 + assert result + # Assert that owner10 is before owner2 because of the lexicographical order + assert result[2]["Owner"] == "owner10" + assert result[12]["Owner"] == "owner2" + + # Search and sort by Owner in descending order + sort = SortSpec(parameter="Owner", direction=SortDirection.DESC) + total, result = await job_db.search([], [], [sort]) + assert total == 100 + assert result + # Assert that owner10 is before owner2 because of the lexicographical order + assert result[97]["Owner"] == "owner10" + assert result[87]["Owner"] == "owner2" + + # Search and sort by OwnerGroup in ascending order and JobID in descending order + sort1 = SortSpec(parameter="OwnerGroup", direction=SortDirection.ASC) + sort2 = SortSpec(parameter="JobID", direction=SortDirection.DESC) + total, result = await job_db.search([], [], [sort1, sort2]) + assert total == 100 + assert result + assert result[0]["OwnerGroup"] == "owner_group1" + assert result[0]["JobID"] == 50 + assert result[99]["OwnerGroup"] == "owner_group2" + assert result[99]["JobID"] == 51 + + +async def test_search_pagination(job_db): + """Test that we can search for jobs in the database.""" + async with job_db as 
job_db: + result = await asyncio.gather( + *( + job_db.insert( + f"JDL{i}", + f"owner{i}", + "owner_group1" if i < 50 else "owner_group2", + "New", + "dfdfds", + "lhcb", + ) + for i in range(100) + ) + ) + + async with job_db as job_db: + # Search for the first 10 jobs + total, result = await job_db.search([], [], [], per_page=10, page=1) + assert total == 100 + assert result + assert len(result) == 10 + assert result[0]["JobID"] == 1 + + # Search for the second 10 jobs + total, result = await job_db.search([], [], [], per_page=10, page=2) + assert total == 100 + assert result + assert len(result) == 10 + assert result[0]["JobID"] == 11 + + # Search for the last 10 jobs + total, result = await job_db.search([], [], [], per_page=10, page=10) + assert total == 100 + assert result + assert len(result) == 10 + assert result[0]["JobID"] == 91 + + # Search for the second 50 jobs + total, result = await job_db.search([], [], [], per_page=50, page=2) + assert total == 100 + assert result + assert len(result) == 50 + assert result[0]["JobID"] == 51 + + # Invalid page number + total, result = await job_db.search([], [], [], per_page=10, page=11) + assert total == 100 + assert not result + + # Invalid page number + with pytest.raises(InvalidQueryError): + result = await job_db.search([], [], [], per_page=10, page=0) + + # Invalid per_page number + with pytest.raises(InvalidQueryError): + result = await job_db.search([], [], [], per_page=0, page=1) async def test_set_job_command_invalid_job_id(job_db: JobDB): + """Test that setting a command for a non-existent job raises JobNotFound.""" async with job_db as job_db: with pytest.raises(JobNotFound): await job_db.set_job_command(123456, "test_command") diff --git a/diracx-routers/src/diracx/routers/job_manager/__init__.py b/diracx-routers/src/diracx/routers/job_manager/__init__.py index 6c42ead4..e995e712 100644 --- a/diracx-routers/src/diracx/routers/job_manager/__init__.py +++ 
b/diracx-routers/src/diracx/routers/job_manager/__init__.py @@ -6,7 +6,7 @@ from http import HTTPStatus from typing import Annotated, Any, TypedDict -from fastapi import BackgroundTasks, Body, Depends, HTTPException, Query +from fastapi import BackgroundTasks, Body, Depends, HTTPException, Query, Response from pydantic import BaseModel, root_validator from sqlalchemy.exc import NoResultFound @@ -65,13 +65,6 @@ def validate_fields(cls, v): return v -class JobDefinition(BaseModel): - owner: str - group: str - vo: str - jdl: str - - class InsertedJob(TypedDict): JobID: int Status: str @@ -507,6 +500,7 @@ async def reschedule_single_job( }, } + EXAMPLE_RESPONSES: dict[int | str, dict[str, Any]] = { 200: { "description": "List of matching results", @@ -537,8 +531,46 @@ async def reschedule_single_job( } }, }, + 206: { + "description": "Partial Content. Only a part of the requested range could be served.", + "headers": { + "Content-Range": { + "description": "The range of jobs returned in this response", + "schema": {"type": "string", "example": "jobs 0-1/4"}, + } + }, + "model": list[dict[str, Any]], + "content": { + "application/json": { + "example": [ + { + "JobID": 1, + "JobGroup": "jobGroup", + "Owner": "myvo:my_nickname", + "SubmissionTime": "2023-05-25T07:03:35.602654", + "LastUpdateTime": "2023-05-25T07:03:35.602652", + "Status": "RECEIVED", + "MinorStatus": "Job accepted", + "ApplicationStatus": "Unknown", + }, + { + "JobID": 2, + "JobGroup": "my_nickname", + "Owner": "myvo:cburr", + "SubmissionTime": "2023-05-25T07:03:36.256378", + "LastUpdateTime": "2023-05-25T07:10:11.974324", + "Status": "Done", + "MinorStatus": "Application Exited Successfully", + "ApplicationStatus": "All events processed", + }, + ] + } + }, + }, } +MAX_PER_PAGE = 10000 + @router.post("/search", responses=EXAMPLE_RESPONSES) async def search( @@ -546,7 +578,8 @@ async def search( job_db: JobDB, user_info: Annotated[AuthorizedUserInfo, Depends(verify_dirac_access_token)], check_permissions: 
CheckWMSPolicyCallable, - page: int = 0, + response: Response, + page: int = 1, per_page: int = 100, body: Annotated[ JobSearchParams | None, Body(openapi_examples=EXAMPLE_SEARCHES) @@ -557,6 +590,11 @@ async def search( **TODO: Add more docs** """ await check_permissions(action=ActionType.QUERY, job_db=job_db) + + # Apply a limit to per_page to prevent abuse of the API + if per_page > MAX_PER_PAGE: + per_page = MAX_PER_PAGE + if body is None: body = JobSearchParams() # TODO: Apply all the job policy stuff properly using user_info @@ -568,8 +606,8 @@ async def search( "value": user_info.sub, } ) - # TODO: Pagination - return await job_db.search( + + total, jobs = await job_db.search( body.parameters, body.search, body.sort, @@ -577,6 +615,23 @@ async def search( page=page, per_page=per_page, ) + # Set the Content-Range header if needed + # https://datatracker.ietf.org/doc/html/rfc7233#section-4 + + # No jobs found but there are jobs for the requested search + # https://datatracker.ietf.org/doc/html/rfc7233#section-4.4 + if len(jobs) == 0 and total > 0: + response.headers["Content-Range"] = f"jobs */{total}" + response.status_code = HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE + + # The total number of jobs is greater than the number of jobs returned + # https://datatracker.ietf.org/doc/html/rfc7233#section-4.2 + elif len(jobs) < total: + first_idx = per_page * (page - 1) + last_idx = min(first_idx + len(jobs), total) - 1 if total > 0 else 0 + response.headers["Content-Range"] = f"jobs {first_idx}-{last_idx}/{total}" + response.status_code = HTTPStatus.PARTIAL_CONTENT + return jobs @router.post("/summary") diff --git a/diracx-routers/tests/test_job_manager.py b/diracx-routers/tests/test_job_manager.py index 4b16ea1b..2f97833e 100644 --- a/diracx-routers/tests/test_job_manager.py +++ b/diracx-routers/tests/test_job_manager.py @@ -107,6 +107,8 @@ def test_insert_and_list_parametric_jobs(normal_user_client): listed_jobs = r.json() + assert "Content-Range" not in 
r.headers + assert len(listed_jobs) == 3 # Parameters.JOB_ID is 3 assert submitted_job_ids == sorted([job_dict["JobID"] for job_dict in listed_jobs]) @@ -132,33 +134,44 @@ def test_insert_and_list_bulk_jobs(job_definitions, normal_user_client): listed_jobs = r.json() + assert "Content-Range" not in r.headers + assert len(listed_jobs) == len(job_definitions) assert submitted_job_ids == sorted([job_dict["JobID"] for job_dict in listed_jobs]) def test_insert_and_search(normal_user_client): + """Test inserting a job and then searching for it.""" # job_definitions = [TEST_JDL%(normal_user_client.dirac_token_payload)] job_definitions = [TEST_JDL] r = normal_user_client.post("/api/jobs/", json=job_definitions) - assert r.status_code == 200, r.json() - assert len(r.json()) == len(job_definitions) + listed_jobs = r.json() + assert r.status_code == 200, listed_jobs + assert len(listed_jobs) == len(job_definitions) submitted_job_ids = sorted([job_dict["JobID"] for job_dict in r.json()]) # Test /jobs/search + # 1. Search for all jobs r = normal_user_client.post("/api/jobs/search") - assert r.status_code == 200, r.json() - assert [x["JobID"] for x in r.json()] == submitted_job_ids - assert {x["VerifiedFlag"] for x in r.json()} == {True} + listed_jobs = r.json() + assert r.status_code == 200, listed_jobs + assert [x["JobID"] for x in listed_jobs] == submitted_job_ids + assert {x["VerifiedFlag"] for x in listed_jobs} == {True} + # 2. Search for all jobs with status NEW: should return an empty list r = normal_user_client.post( "/api/jobs/search", json={"search": [{"parameter": "Status", "operator": "eq", "value": "NEW"}]}, ) - assert r.status_code == 200, r.json() - assert r.json() == [] + listed_jobs = r.json() + assert r.status_code == 200, listed_jobs + assert listed_jobs == [] + assert "Content-Range" not in r.headers + + # 3. 
Search for all jobs with status RECEIVED: should return the submitted jobs r = normal_user_client.post( "/api/jobs/search", json={ @@ -171,17 +184,24 @@ def test_insert_and_search(normal_user_client): ] }, ) - assert r.status_code == 200, r.json() - assert [x["JobID"] for x in r.json()] == submitted_job_ids + listed_jobs = r.json() + assert r.status_code == 200, listed_jobs + assert [x["JobID"] for x in listed_jobs] == submitted_job_ids + assert "Content-Range" not in r.headers + + # 4. Search for all jobs but just return the JobID and the Status r = normal_user_client.post( "/api/jobs/search", json={"parameters": ["JobID", "Status"]} ) - assert r.status_code == 200, r.json() - assert r.json() == [ + listed_jobs = r.json() + assert r.status_code == 200, listed_jobs + assert listed_jobs == [ {"JobID": jid, "Status": JobStatus.RECEIVED.value} for jid in submitted_job_ids ] + assert "Content-Range" not in r.headers + # Test /jobs/summary r = normal_user_client.post( "/api/jobs/summary", json={"grouping": ["Status", "OwnerGroup"]} @@ -220,22 +240,97 @@ def test_insert_and_search(normal_user_client): def test_search_distinct(normal_user_client): + """Test that the distinct parameter works as expected.""" job_definitions = [TEST_JDL, TEST_JDL, TEST_JDL] r = normal_user_client.post("/api/jobs/", json=job_definitions) - assert r.status_code == 200, r.json() - assert len(r.json()) == len(job_definitions) + listed_jobs = r.json() + assert r.status_code == 200, listed_jobs + assert len(listed_jobs) == len(job_definitions) # Check that distinct collapses identical records when true r = normal_user_client.post( "/api/jobs/search", json={"parameters": ["Status"], "distinct": False} ) - assert r.status_code == 200, r.json() - assert len(r.json()) > 1 + listed_jobs = r.json() + assert r.status_code == 200, listed_jobs + assert len(listed_jobs) > 1 + + assert "Content-Range" not in r.headers + r = normal_user_client.post( "/api/jobs/search", json={"parameters": ["Status"], 
"distinct": True} ) - assert r.status_code == 200, r.json() - assert len(r.json()) == 1 + listed_jobs = r.json() + assert r.status_code == 200, listed_jobs + assert len(listed_jobs) == 1 + + assert "Content-Range" not in r.headers + + +def test_search_pagination(normal_user_client): + """Test that the pagination works as expected.""" + job_definitions = [TEST_JDL] * 20 + r = normal_user_client.post("/api/jobs/", json=job_definitions) + listed_jobs = r.json() + assert r.status_code == 200, listed_jobs + assert len(listed_jobs) == len(job_definitions) + + # Get the first 20 jobs (all of them) + r = normal_user_client.post("/api/jobs/search", params={"page": 1, "per_page": 20}) + listed_jobs = r.json() + assert r.status_code == 200, listed_jobs + assert len(listed_jobs) == 20 + + assert "Content-Range" not in r.headers + + # Get the first 10 jobs + r = normal_user_client.post("/api/jobs/search", params={"page": 1, "per_page": 10}) + listed_jobs = r.json() + assert r.status_code == 206, listed_jobs + assert len(listed_jobs) == 10 + + assert "Content-Range" in r.headers + assert ( + r.headers["Content-Range"] + == f"jobs 0-{len(listed_jobs) -1}/{len(job_definitions)}" + ) + + # Get the next 10 jobs + r = normal_user_client.post("/api/jobs/search", params={"page": 2, "per_page": 10}) + listed_jobs = r.json() + assert r.status_code == 206, listed_jobs + assert len(listed_jobs) == 10 + + assert "Content-Range" in r.headers + assert ( + r.headers["Content-Range"] + == f"jobs 10-{len(listed_jobs) + 10 - 1}/{len(job_definitions)}" + ) + + # Get an unknown page + r = normal_user_client.post("/api/jobs/search", params={"page": 3, "per_page": 10}) + listed_jobs = r.json() + assert r.status_code == 416, listed_jobs + assert len(listed_jobs) == 0 + + assert "Content-Range" in r.headers + assert r.headers["Content-Range"] == f"jobs */{len(job_definitions)}" + + # Set the per_page parameter to 0 + r = normal_user_client.post("/api/jobs/search", params={"page": 1, "per_page": 0}) + 
assert r.status_code == 400, r.json() + + # Set the per_page parameter to a negative number + r = normal_user_client.post("/api/jobs/search", params={"page": 1, "per_page": -1}) + assert r.status_code == 400, r.json() + + # Set the page parameter to 0 + r = normal_user_client.post("/api/jobs/search", params={"page": 0, "per_page": 10}) + assert r.status_code == 400, r.json() + + # Set the page parameter to a negative number + r = normal_user_client.post("/api/jobs/search", params={"page": -1, "per_page": 10}) + assert r.status_code == 400, r.json() def test_user_cannot_submit_parametric_jdl_greater_than_max_parametric_jobs(