From faab36a7511376a03c22c25b4b692ae65061b821 Mon Sep 17 00:00:00 2001 From: Paul Hoffman Date: Thu, 19 Dec 2024 11:04:42 -0800 Subject: [PATCH] Convert `Optional[]` to union types (#252) Convert `Optional[]` to [union types](https://docs.python.org/3/library/stdtypes.html#union-type) in type hints [SC-61053](https://app.shortcut.com/tiledb-inc/story/61053) resolves single-cell-data/TileDB-SOMA#3334 --- python-spec/src/somacore/_mixin.py | 10 +-- python-spec/src/somacore/base.py | 12 ++-- python-spec/src/somacore/collection.py | 37 +++++------ python-spec/src/somacore/coordinates.py | 6 +- python-spec/src/somacore/data.py | 37 +++++------ python-spec/src/somacore/experiment.py | 8 ++- python-spec/src/somacore/options.py | 8 ++- python-spec/src/somacore/query/axis.py | 6 +- python-spec/src/somacore/query/query.py | 27 ++++---- python-spec/src/somacore/scene.py | 36 +++++------ python-spec/src/somacore/spatial.py | 83 +++++++++++++------------ python-spec/src/somacore/types.py | 13 ++-- 12 files changed, 150 insertions(+), 133 deletions(-) diff --git a/python-spec/src/somacore/_mixin.py b/python-spec/src/somacore/_mixin.py index 9fc9ab22..1d0d8ca7 100644 --- a/python-spec/src/somacore/_mixin.py +++ b/python-spec/src/somacore/_mixin.py @@ -1,6 +1,8 @@ """Tools for making mixins with SOMA Collections.""" -from typing import Generic, MutableMapping, Optional, Type, TypeVar, Union, overload +from __future__ import annotations + +from typing import Generic, MutableMapping, Type, TypeVar, Union, overload import attrs @@ -35,10 +37,10 @@ class FSCollection(FirstSecondMixin, CollectionBase): inst.second = 500 """ - typ: Optional[Type[_T]] = None + typ: Type[_T] | None = None """The type we expect to return from this field.""" - item_name: Optional[str] = None + item_name: str | None = None """The name of the item we are getting (``x._backing["whatever"]``). This uses the name of the field by default but can be manually overridden. 
@@ -59,7 +61,7 @@ def __get__(self, inst: None, owner: Type[_Coll]) -> "item[_T]": ... @overload def __get__(self, inst: _Coll, owner: Type[_Coll]) -> _T: ... - def __get__(self, inst: Optional[_Coll], owner: Type[_Coll]) -> Union["item", _T]: + def __get__(self, inst: _Coll | None, owner: Type[_Coll]) -> Union["item", _T]: del owner # unused if not inst: return self diff --git a/python-spec/src/somacore/base.py b/python-spec/src/somacore/base.py index 8d6198f4..91b0c441 100644 --- a/python-spec/src/somacore/base.py +++ b/python-spec/src/somacore/base.py @@ -4,8 +4,10 @@ members will be exported to the ``somacore`` namespace. """ +from __future__ import annotations + import abc -from typing import Any, ClassVar, MutableMapping, Optional +from typing import Any, ClassVar, MutableMapping from typing_extensions import LiteralString, Self @@ -25,8 +27,8 @@ def open( uri: str, mode: options.OpenMode = "r", *, - context: Optional[Any] = None, - platform_config: Optional[options.PlatformConfig] = None, + context: Any | None = None, + platform_config: options.PlatformConfig | None = None, ) -> Self: """Opens the SOMA object of this type at the given URI. @@ -43,7 +45,7 @@ def open( @classmethod @abc.abstractmethod - def exists(cls, uri: str, *, context: Optional[Any] = None) -> bool: + def exists(cls, uri: str, *, context: Any | None = None) -> bool: """Checks whether a SOMA object of this type is stored at the URI. Args: @@ -66,7 +68,7 @@ def uri(self) -> str: raise NotImplementedError() @property - def context(self) -> Optional[types.ContextBase]: + def context(self) -> types.ContextBase | None: """A value storing implementation-specific configuration information. 
This contains long-lived (i.e., not call-specific) information that is diff --git a/python-spec/src/somacore/collection.py b/python-spec/src/somacore/collection.py index 8ff2877e..bbe11f9e 100644 --- a/python-spec/src/somacore/collection.py +++ b/python-spec/src/somacore/collection.py @@ -1,8 +1,9 @@ +from __future__ import annotations + import abc from typing import ( Any, MutableMapping, - Optional, Sequence, Tuple, Type, @@ -43,8 +44,8 @@ def create( cls, uri: str, *, - platform_config: Optional[options.PlatformConfig] = None, - context: Optional[Any] = None, + platform_config: options.PlatformConfig | None = None, + context: Any | None = None, ) -> Self: """Creates a new collection of this type at the given URI. @@ -69,8 +70,8 @@ def add_new_collection( key: str, kind: None = None, *, - uri: Optional[str] = ..., - platform_config: Optional[options.PlatformConfig] = ..., + uri: str | None = ..., + platform_config: options.PlatformConfig | None = ..., ) -> "Collection": ... @overload @@ -80,18 +81,18 @@ def add_new_collection( key: str, kind: Type[_CT], *, - uri: Optional[str] = ..., - platform_config: Optional[options.PlatformConfig] = ..., + uri: str | None = ..., + platform_config: options.PlatformConfig | None = ..., ) -> _CT: ... @abc.abstractmethod def add_new_collection( self, key: str, - kind: Optional[Type["BaseCollection"]] = None, + kind: Type["BaseCollection"] | None = None, *, - uri: Optional[str] = None, - platform_config: Optional[options.PlatformConfig] = None, + uri: str | None = None, + platform_config: options.PlatformConfig | None = None, ) -> "BaseCollection": """Creates a new sub-collection of this collection. 
To add an existing collection as a sub-element of this collection, @@ -151,11 +152,11 @@ def add_new_dataframe( self, key: str, *, - uri: Optional[str] = None, + uri: str | None = None, schema: pa.Schema, index_column_names: Sequence[str] = (options.SOMA_JOINID,), - domain: Optional[Sequence[Optional[Tuple[Any, Any]]]] = None, - platform_config: Optional[options.PlatformConfig] = None, + domain: Sequence[Tuple[Any, Any] | None] | None = None, + platform_config: options.PlatformConfig | None = None, ) -> data.DataFrame: """Creates a new DataFrame as a child of this collection. @@ -174,10 +175,10 @@ def add_new_dense_ndarray( self, key: str, *, - uri: Optional[str] = None, + uri: str | None = None, type: pa.DataType, shape: Sequence[int], - platform_config: Optional[options.PlatformConfig] = None, + platform_config: options.PlatformConfig | None = None, ) -> data.DenseNDArray: """Creates a new dense NDArray as a child of this collection. @@ -196,10 +197,10 @@ def add_new_sparse_ndarray( self, key: str, *, - uri: Optional[str] = None, + uri: str | None = None, type: pa.DataType, shape: Sequence[int], - platform_config: Optional[options.PlatformConfig] = None, + platform_config: options.PlatformConfig | None = None, ) -> data.SparseNDArray: """Creates a new sparse NDArray as a child of this collection. @@ -219,7 +220,7 @@ def __setitem__(self, key: str, value: _Elem) -> None: @abc.abstractmethod def set( - self, key: str, value: _Elem, *, use_relative_uri: Optional[bool] = None + self, key: str, value: _Elem, *, use_relative_uri: bool | None = None ) -> Self: """Sets an entry of this collection. 
diff --git a/python-spec/src/somacore/coordinates.py b/python-spec/src/somacore/coordinates.py index bf0d1c79..bb8c2e81 100644 --- a/python-spec/src/somacore/coordinates.py +++ b/python-spec/src/somacore/coordinates.py @@ -1,9 +1,11 @@ """Definitions of types related to coordinate systems.""" +from __future__ import annotations + import abc import collections.abc import itertools -from typing import Iterable, Optional, Sequence, Tuple, Union +from typing import Iterable, Sequence, Tuple, Union import attrs import numpy as np @@ -23,7 +25,7 @@ class Axis: name: str """Name of the axis.""" - unit: Optional[str] = None + unit: str | None = None """Optional string name for the units of the axis.""" diff --git a/python-spec/src/somacore/data.py b/python-spec/src/somacore/data.py index b56df7db..8d3f7832 100644 --- a/python-spec/src/somacore/data.py +++ b/python-spec/src/somacore/data.py @@ -6,13 +6,14 @@ Default values are provided here as a reference for implementors. """ +from __future__ import annotations + import abc from typing import ( Any, ClassVar, Iterator, List, - Optional, Sequence, Tuple, TypeVar, @@ -51,9 +52,9 @@ def create( *, schema: pa.Schema, index_column_names: Sequence[str] = (options.SOMA_JOINID,), - domain: Optional[Sequence[Optional[Tuple[Any, Any]]]] = None, - platform_config: Optional[options.PlatformConfig] = None, - context: Optional[Any] = None, + domain: Sequence[Tuple[Any, Any] | None] | None = None, + platform_config: options.PlatformConfig | None = None, + context: Any | None = None, ) -> Self: """Creates a new ``DataFrame`` at the given URI. 
@@ -112,13 +113,13 @@ def create( def read( self, coords: options.SparseDFCoords = (), - column_names: Optional[Sequence[str]] = None, + column_names: Sequence[str] | None = None, *, batch_size: options.BatchSize = options.BatchSize(), - partitions: Optional[options.ReadPartitions] = None, + partitions: options.ReadPartitions | None = None, result_order: options.ResultOrderStr = _RO_AUTO, - value_filter: Optional[str] = None, - platform_config: Optional[options.PlatformConfig] = None, + value_filter: str | None = None, + platform_config: options.PlatformConfig | None = None, ) -> "ReadIter[pa.Table]": """Reads a user-defined slice of data into Arrow tables. @@ -218,7 +219,7 @@ def write( self, values: Union[pa.RecordBatch, pa.Table], *, - platform_config: Optional[options.PlatformConfig] = None, + platform_config: options.PlatformConfig | None = None, ) -> Self: """Writes the data from an Arrow table to the persistent object. @@ -283,9 +284,9 @@ def create( uri: str, *, type: pa.DataType, - shape: Sequence[Optional[int]], - platform_config: Optional[options.PlatformConfig] = None, - context: Optional[Any] = None, + shape: Sequence[int | None], + platform_config: options.PlatformConfig | None = None, + context: Any | None = None, ) -> Self: """Creates a new ND array of the current type at the given URI. @@ -376,9 +377,9 @@ def read( self, coords: options.DenseNDCoords = (), *, - partitions: Optional[options.ReadPartitions] = None, + partitions: options.ReadPartitions | None = None, result_order: options.ResultOrderStr = _RO_AUTO, - platform_config: Optional[options.PlatformConfig] = None, + platform_config: options.PlatformConfig | None = None, ) -> pa.Tensor: """Reads the specified subarray as a Tensor. 
@@ -432,7 +433,7 @@ def write( coords: options.DenseNDCoords, values: pa.Tensor, *, - platform_config: Optional[options.PlatformConfig] = None, + platform_config: options.PlatformConfig | None = None, ) -> Self: """Writes an Arrow tensor to a subarray of the persistent object. @@ -476,9 +477,9 @@ def read( coords: options.SparseNDCoords = (), *, batch_size: options.BatchSize = options.BatchSize(), - partitions: Optional[options.ReadPartitions] = None, + partitions: options.ReadPartitions | None = None, result_order: options.ResultOrderStr = _RO_AUTO, - platform_config: Optional[options.PlatformConfig] = None, + platform_config: options.PlatformConfig | None = None, ) -> "SparseRead": """Reads the specified subarray in batches. @@ -537,7 +538,7 @@ def write( self, values: SparseArrowData, *, - platform_config: Optional[options.PlatformConfig] = None, + platform_config: options.PlatformConfig | None = None, ) -> Self: """Writes a Tensor to a subarray of the persistent object. diff --git a/python-spec/src/somacore/experiment.py b/python-spec/src/somacore/experiment.py index 8869ffa7..c825cb25 100644 --- a/python-spec/src/somacore/experiment.py +++ b/python-spec/src/somacore/experiment.py @@ -1,6 +1,8 @@ +from __future__ import annotations + from abc import ABC from abc import abstractmethod -from typing import Generic, Optional, TypeVar +from typing import Generic, TypeVar from typing_extensions import Final @@ -71,8 +73,8 @@ def axis_query( self, measurement_name: str, *, - obs_query: Optional[query.AxisQuery] = None, - var_query: Optional[query.AxisQuery] = None, + obs_query: query.AxisQuery | None = None, + var_query: query.AxisQuery | None = None, ) -> ExperimentAxisQuery: """Creates an axis query over this experiment. 
diff --git a/python-spec/src/somacore/options.py b/python-spec/src/somacore/options.py index 17faadb6..27836a41 100644 --- a/python-spec/src/somacore/options.py +++ b/python-spec/src/somacore/options.py @@ -4,8 +4,10 @@ SOMA types that require them, not reimplemented by the implementing package. """ +from __future__ import annotations + import enum -from typing import Any, Dict, Mapping, Optional, Sequence, TypeVar, Union +from typing import Any, Dict, Mapping, Sequence, TypeVar, Union import attrs import numpy as np @@ -89,9 +91,9 @@ class BatchSize: Experimental """ - count: Optional[int] = attrs.field(default=None) + count: int | None = attrs.field(default=None) """``arrow.Table``s with this number of rows will be returned.""" - bytes: Optional[int] = attrs.field(default=None) + bytes: int | None = attrs.field(default=None) """Data of up to this size in bytes will be returned.""" @count.validator diff --git a/python-spec/src/somacore/query/axis.py b/python-spec/src/somacore/query/axis.py index 9f90e826..c108b6a8 100644 --- a/python-spec/src/somacore/query/axis.py +++ b/python-spec/src/somacore/query/axis.py @@ -1,4 +1,6 @@ -from typing import Optional, Sequence, Tuple +from __future__ import annotations + +from typing import Sequence, Tuple import attrs import numpy as np @@ -81,7 +83,7 @@ class AxisQuery: Lifecycle: maturing """ - value_filter: Optional[str] = attrs.field( + value_filter: str | None = attrs.field( default=None, validator=attrs.validators.optional(attrs.validators.instance_of(str)), ) diff --git a/python-spec/src/somacore/query/query.py b/python-spec/src/somacore/query/query.py index 51e01efb..f1eb9f7e 100644 --- a/python-spec/src/somacore/query/query.py +++ b/python-spec/src/somacore/query/query.py @@ -1,9 +1,10 @@ +from __future__ import annotations + from abc import ABC from abc import abstractmethod from typing import ( Any, Mapping, - Optional, Sequence, Union, ) @@ -35,9 +36,9 @@ class AxisColumnNames(TypedDict, total=False): Lifecycle: 
maturing """ - obs: Optional[Sequence[str]] + obs: Sequence[str] | None """obs columns to use. All columns if ``None`` or not present.""" - var: Optional[Sequence[str]] + var: Sequence[str] | None """var columns to use. All columns if ``None`` or not present.""" @@ -59,11 +60,11 @@ class ExperimentAxisQuery(ABC): def obs( self, *, - column_names: Optional[Sequence[str]] = None, + column_names: Sequence[str] | None = None, batch_size: BatchSize = BatchSize(), - partitions: Optional[ReadPartitions] = None, + partitions: ReadPartitions | None = None, result_order: ResultOrderStr = _RO_AUTO, - platform_config: Optional[PlatformConfig] = None, + platform_config: PlatformConfig | None = None, ) -> ReadIter[pa.Table]: """Returns ``obs`` as an `Arrow table `_ @@ -77,11 +78,11 @@ def obs( def var( self, *, - column_names: Optional[Sequence[str]] = None, + column_names: Sequence[str] | None = None, batch_size: BatchSize = BatchSize(), - partitions: Optional[ReadPartitions] = None, + partitions: ReadPartitions | None = None, result_order: ResultOrderStr = _RO_AUTO, - platform_config: Optional[PlatformConfig] = None, + platform_config: PlatformConfig | None = None, ) -> ReadIter[pa.Table]: """Returns ``var`` as an `Arrow table `_ @@ -140,9 +141,9 @@ def X( layer_name: str, *, batch_size: BatchSize = BatchSize(), - partitions: Optional[ReadPartitions] = None, + partitions: ReadPartitions | None = None, result_order: ResultOrderStr = _RO_AUTO, - platform_config: Optional[PlatformConfig] = None, + platform_config: PlatformConfig | None = None, ) -> SparseRead: """Returns an ``X`` layer as a sparse read. @@ -214,7 +215,7 @@ def to_anndata( self, X_name: str, *, - column_names: Optional[AxisColumnNames] = None, + column_names: AxisColumnNames | None = None, X_layers: Sequence[str] = (), obsm_layers: Sequence[str] = (), obsp_layers: Sequence[str] = (), @@ -300,7 +301,7 @@ def ms(self) -> Mapping[str, measurement.Measurement]: ... def obs(self) -> DataFrame: ... 
@property - def context(self) -> Optional[base_types.ContextBase]: ... + def context(self) -> base_types.ContextBase | None: ... @property def obs_spatial_presence(self) -> DataFrame: ... diff --git a/python-spec/src/somacore/scene.py b/python-spec/src/somacore/scene.py index 3a81e57d..8f2ec002 100644 --- a/python-spec/src/somacore/scene.py +++ b/python-spec/src/somacore/scene.py @@ -1,7 +1,9 @@ """Implementation of the SOMA scene collection for spatial data""" +from __future__ import annotations + import abc -from typing import Any, Generic, Optional, Sequence, TypeVar, Union +from typing import Any, Generic, Sequence, TypeVar, Union from typing_extensions import Final, Self @@ -97,11 +99,9 @@ def create( cls, uri: str, *, - coordinate_space: Optional[ - Union[Sequence[str], coordinates.CoordinateSpace] - ] = None, - platform_config: Optional[options.PlatformConfig] = None, - context: Optional[Any] = None, + coordinate_space: Sequence[str] | coordinates.CoordinateSpace | None = None, + platform_config: options.PlatformConfig | None = None, + context: Any | None = None, ) -> Self: """Creates a new scene at the given URI. @@ -123,7 +123,7 @@ def create( @property @abc.abstractmethod - def coordinate_space(self) -> Optional[coordinates.CoordinateSpace]: + def coordinate_space(self) -> coordinates.CoordinateSpace | None: """Coordinate system for this scene. 
Lifecycle: experimental @@ -141,8 +141,8 @@ def add_new_geometry_dataframe( key: str, subcollection: Union[str, Sequence[str]], *, - transform: Optional[coordinates.CoordinateTransform], - uri: Optional[str] = ..., + transform: coordinates.CoordinateTransform | None, + uri: str | None = ..., **kwargs, ) -> _GeometryDataFrame: """Adds a ``GeometryDataFrame`` to the scene and sets a coordinate transform @@ -182,8 +182,8 @@ def add_new_multiscale_image( key: str, subcollection: Union[str, Sequence[str]], *, - transform: Optional[coordinates.CoordinateTransform], - uri: Optional[str] = ..., + transform: coordinates.CoordinateTransform | None, + uri: str | None = ..., **kwargs, ) -> _MultiscaleImage: """Adds a ``MultiscaleImage`` to the scene and sets a coordinate transform @@ -214,8 +214,8 @@ def add_new_point_cloud_dataframe( key: str, subcollection: Union[str, Sequence[str]], *, - transform: Optional[coordinates.CoordinateTransform], - uri: Optional[str] = ..., + transform: coordinates.CoordinateTransform | None, + uri: str | None = ..., **kwargs, ) -> _PointCloudDataFrame: """Adds a point cloud to the scene and sets a coordinate transform @@ -257,7 +257,7 @@ def set_transform_to_geometry_dataframe( subcollection: Union[str, Sequence[str]] = "obsl", *, transform: coordinates.CoordinateTransform, - coordinate_space: Optional[coordinates.CoordinateSpace] = None, + coordinate_space: coordinates.CoordinateSpace | None = None, ) -> _GeometryDataFrame: """Adds the coordinate transform for the scene coordinate space to a geometry dataframe stored in the scene. @@ -293,7 +293,7 @@ def set_transform_to_multiscale_image( subcollection: Union[str, Sequence[str]] = "img", *, transform: coordinates.CoordinateTransform, - coordinate_space: Optional[coordinates.CoordinateSpace] = None, + coordinate_space: coordinates.CoordinateSpace | None = None, ) -> _MultiscaleImage: """Adds the coordinate transform for the scene coordinate space to a multiscale image stored in the scene. 
@@ -325,7 +325,7 @@ def set_transform_to_point_cloud_dataframe( subcollection: Union[str, Sequence[str]] = "obsl", *, transform: coordinates.CoordinateTransform, - coordinate_space: Optional[coordinates.CoordinateSpace] = None, + coordinate_space: coordinates.CoordinateSpace | None = None, ) -> _PointCloudDataFrame: """Adds the coordinate transform for the scene coordinate space to a point cloud stored in the scene. @@ -380,7 +380,7 @@ def get_transform_from_multiscale_image( key: str, subcollection: str = "img", *, - level: Optional[Union[str, int]] = None, + level: str | int | None = None, ) -> coordinates.CoordinateTransform: """Returns the coordinate transformation from the requested multiscale image to the scene. @@ -444,7 +444,7 @@ def get_transform_to_multiscale_image( key: str, subcollection: str = "img", *, - level: Optional[Union[str, int]] = None, + level: str | int | None = None, ) -> coordinates.CoordinateTransform: """Returns the coordinate transformation from the scene to a requested multiscale image. diff --git a/python-spec/src/somacore/spatial.py b/python-spec/src/somacore/spatial.py index 1a70b265..f042af52 100644 --- a/python-spec/src/somacore/spatial.py +++ b/python-spec/src/somacore/spatial.py @@ -1,12 +1,13 @@ """Implementation of the SOMA image collection for spatial data""" +from __future__ import annotations + import abc from dataclasses import dataclass from typing import ( Any, Generic, MutableMapping, - Optional, Sequence, Tuple, TypeVar, @@ -60,9 +61,9 @@ def create( "x", "y", ), - domain: Optional[Sequence[Optional[Tuple[Any, Any]]]] = None, - platform_config: Optional[options.PlatformConfig] = None, - context: Optional[Any] = None, + domain: Sequence[Tuple[Any, Any] | None] | None = None, + platform_config: options.PlatformConfig | None = None, + context: Any | None = None, ) -> Self: """Creates a new ``PointCloudDataFrame`` at the given URI. 
@@ -114,13 +115,13 @@ def create( def read( self, coords: options.SparseDFCoords = (), - column_names: Optional[Sequence[str]] = None, + column_names: Sequence[str] | None = None, *, batch_size: options.BatchSize = options.BatchSize(), - partitions: Optional[options.ReadPartitions] = None, + partitions: options.ReadPartitions | None = None, result_order: options.ResultOrderStr = _RO_AUTO, - value_filter: Optional[str] = None, - platform_config: Optional[options.PlatformConfig] = None, + value_filter: str | None = None, + platform_config: options.PlatformConfig | None = None, ) -> data.ReadIter[pa.Table]: """Reads a user-defined slice of data into Arrow tables. @@ -151,16 +152,16 @@ def read( @abc.abstractmethod def read_spatial_region( self, - region: Optional[options.SpatialRegion] = None, - column_names: Optional[Sequence[str]] = None, + region: options.SpatialRegion | None = None, + column_names: Sequence[str] | None = None, *, - region_transform: Optional[coordinates.CoordinateTransform] = None, - region_coord_space: Optional[coordinates.CoordinateSpace] = None, + region_transform: coordinates.CoordinateTransform | None = None, + region_coord_space: coordinates.CoordinateSpace | None = None, batch_size: options.BatchSize = options.BatchSize(), - partitions: Optional[options.ReadPartitions] = None, + partitions: options.ReadPartitions | None = None, result_order: options.ResultOrderStr = _RO_AUTO, - value_filter: Optional[str] = None, - platform_config: Optional[options.PlatformConfig] = None, + value_filter: str | None = None, + platform_config: options.PlatformConfig | None = None, ) -> "SpatialRead[data.ReadIter[pa.Table]]": """Reads data intersecting an user-defined region of space into a :class:`SpatialRead` with data in Arrow tables. 
@@ -203,7 +204,7 @@ def write( self, values: Union[pa.RecordBatch, pa.Table], *, - platform_config: Optional[options.PlatformConfig] = None, + platform_config: options.PlatformConfig | None = None, ) -> Self: """Writes the data from an Arrow table to the persistent object. @@ -299,9 +300,9 @@ def create( "x", "y", ), - domain: Optional[Sequence[Optional[Tuple[Any, Any]]]] = None, - platform_config: Optional[options.PlatformConfig] = None, - context: Optional[Any] = None, + domain: Sequence[Tuple[Any, Any] | None] | None = None, + platform_config: options.PlatformConfig | None = None, + context: Any | None = None, ) -> Self: """Creates a new ``GeometryDataFrame`` at the given URI. @@ -349,13 +350,13 @@ def create( def read( self, coords: options.SparseDFCoords = (), - column_names: Optional[Sequence[str]] = None, + column_names: Sequence[str] | None = None, *, batch_size: options.BatchSize = options.BatchSize(), - partitions: Optional[options.ReadPartitions] = None, + partitions: options.ReadPartitions | None = None, result_order: options.ResultOrderStr = _RO_AUTO, - value_filter: Optional[str] = None, - platform_config: Optional[options.PlatformConfig] = None, + value_filter: str | None = None, + platform_config: options.PlatformConfig | None = None, ) -> data.ReadIter[pa.Table]: """Reads a user-defined slice of data into Arrow tables. 
@@ -386,16 +387,16 @@ def read( @abc.abstractmethod def read_spatial_region( self, - region: Optional[options.SpatialRegion] = None, - column_names: Optional[Sequence[str]] = None, + region: options.SpatialRegion | None = None, + column_names: Sequence[str] | None = None, *, - region_transform: Optional[coordinates.CoordinateTransform] = None, - region_coord_space: Optional[coordinates.CoordinateSpace] = None, + region_transform: coordinates.CoordinateTransform | None = None, + region_coord_space: coordinates.CoordinateSpace | None = None, batch_size: options.BatchSize = options.BatchSize(), - partitions: Optional[options.ReadPartitions] = None, + partitions: options.ReadPartitions | None = None, result_order: options.ResultOrderStr = _RO_AUTO, - value_filter: Optional[str] = None, - platform_config: Optional[options.PlatformConfig] = None, + value_filter: str | None = None, + platform_config: options.PlatformConfig | None = None, ) -> "SpatialRead[data.ReadIter[pa.Table]]": """Reads data intersecting an user-defined region of space into a :class:`SpatialRead` with data in Arrow tables. @@ -438,7 +439,7 @@ def write( self, values: Union[pa.RecordBatch, pa.Table], *, - platform_config: Optional[options.PlatformConfig] = None, + platform_config: options.PlatformConfig | None = None, ) -> Self: """Writes the data from an Arrow table to the persistent object. @@ -551,14 +552,14 @@ def create( type: pa.DataType, level_shape: Sequence[int], level_key: str = "level0", - level_uri: Optional[str] = None, + level_uri: str | None = None, coordinate_space: Union[Sequence[str], coordinates.CoordinateSpace] = ( "x", "y", ), - data_axis_order: Optional[Sequence[str]] = None, - platform_config: Optional[options.PlatformConfig] = None, - context: Optional[Any] = None, + data_axis_order: Sequence[str] | None = None, + platform_config: options.PlatformConfig | None = None, + context: Any | None = None, ) -> Self: """Creates a new MultiscaleImage with one initial level. 
@@ -598,7 +599,7 @@ def add_new_level( self, key: str, *, - uri: Optional[str] = None, + uri: str | None = None, shape: Sequence[int], ) -> _DenseND: """Add a new level in the multi-scale image. @@ -617,7 +618,7 @@ def set( key: str, value: _DenseND, *, - use_relative_uri: Optional[bool] = None, + use_relative_uri: bool | None = None, ) -> Self: """Sets a new level in the multi-scale image to be an existing SOMA :class:`data.DenseNDArray`. @@ -651,11 +652,11 @@ def read_spatial_region( region: options.SpatialRegion = (), *, channel_coords: options.DenseCoord = None, - region_transform: Optional[coordinates.CoordinateTransform] = None, - region_coord_space: Optional[coordinates.CoordinateSpace] = None, + region_transform: coordinates.CoordinateTransform | None = None, + region_coord_space: coordinates.CoordinateSpace | None = None, result_order: options.ResultOrderStr = _RO_AUTO, - data_axis_order: Optional[Sequence[str]] = None, - platform_config: Optional[options.PlatformConfig] = None, + data_axis_order: Sequence[str] | None = None, + platform_config: options.PlatformConfig | None = None, ) -> "SpatialRead[pa.Tensor]": """Reads a user-defined region of space into a :class:`SpatialRead` with data in either an Arrow tensor or table. diff --git a/python-spec/src/somacore/types.py b/python-spec/src/somacore/types.py index ba647d2a..1fe02788 100644 --- a/python-spec/src/somacore/types.py +++ b/python-spec/src/somacore/types.py @@ -5,9 +5,10 @@ their own internal type-checking purposes. """ +from __future__ import annotations + from concurrent import futures from typing import ( - Optional, Sequence, Tuple, Type, @@ -68,7 +69,7 @@ class Slice(Protocol[_T_co]): # We use @property here to indicate that these fields are read-only; # just saying:: # - # start: Optional[_T_co] + # start: _T_co | None # # would imply that doing:: # @@ -78,13 +79,13 @@ class Slice(Protocol[_T_co]): # invariant rather than covariant. @property - def start(self) -> Optional[_T_co]: ... 
+ def start(self) -> _T_co | None: ... @property - def stop(self) -> Optional[_T_co]: ... + def stop(self) -> _T_co | None: ... @property - def step(self) -> Optional[_T_co]: ... + def step(self) -> _T_co | None: ... def is_slice_of(__obj: object, __typ: Type[_T]) -> TypeGuard[Slice[_T]]: @@ -103,4 +104,4 @@ class ContextBase(Protocol): experiment queries. Otherwise, the implementer will use its own threadpool. """ - threadpool: Optional[futures.ThreadPoolExecutor] + threadpool: futures.ThreadPoolExecutor | None