Merge pull request #4 from MetaCell/feature/CC-11
CC-11 Add resolution arguments on CLI for segmentation encoding
seankmartin authored Nov 21, 2023
2 parents a4ca8f5 + 38fc6e7 commit 1d96bf1
Showing 3 changed files with 121 additions and 85 deletions.
22 changes: 21 additions & 1 deletion src/cryo_et_neuroglancer/cli.py
@@ -5,12 +5,24 @@


def encode_segmentation(
zarr_path: str, skip_existing: bool, output: str, block_size: int
zarr_path: str,
skip_existing: bool,
output: str,
block_size: int,
resolution: tuple[int, int, int] | list[int],
):
file_path = Path(zarr_path)
if not file_path.exists():
print(f"The input ZARR folder {file_path!s} doesn't exist")
return 1
if len(resolution) == 1:
resolution = (resolution[0],) * 3 # type: ignore
if len(resolution) != 3:
print("Resolution tuple must have 3 values")
return 2
if any(x <= 0 for x in resolution):
print("Resolution component has to be > 0")
return 3
block_size = int(block_size)
block_shape = (block_size, block_size, block_size)
output_path = Path(output) if output else output
@@ -19,6 +31,7 @@ def encode_segmentation(
block_shape,
delete_existing_output_directory=not skip_existing,
output_path=output_path,
resolution=resolution, # type: ignore
)
return 0

@@ -46,6 +59,13 @@ def parse_args(args):
subcommand.add_argument(
"-b", "--block-size", required=False, default=64, help="Block size"
)
subcommand.add_argument(
"-r",
"--resolution",
nargs="+",
type=float,
help="Resolution, must be either 3 values for X Y Z separated by spaces, or a single value that will be set for X Y and Z",
)
subcommand.set_defaults(func=encode_segmentation)

return parser.parse_args(args)
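For reference, a minimal sketch (not part of this commit) of how the new resolution argument behaves once it reaches encode_segmentation: a single -r value is broadcast to all three axes, and an invalid length or a non-positive component maps to the error codes 2 and 3 shown above. The normalize_resolution helper name is hypothetical.

def normalize_resolution(resolution: list[float]) -> tuple[float, float, float] | None:
    """Broadcast a single value to X/Y/Z and reject invalid resolutions."""
    if len(resolution) == 1:
        resolution = [resolution[0]] * 3  # e.g. "-r 7.84" becomes (7.84, 7.84, 7.84)
    if len(resolution) != 3:
        return None  # encode_segmentation prints an error and returns 2
    if any(x <= 0 for x in resolution):
        return None  # encode_segmentation prints an error and returns 3
    return (resolution[0], resolution[1], resolution[2])

assert normalize_resolution([7.84]) == (7.84, 7.84, 7.84)
assert normalize_resolution([1.0, 2.0]) is None
assert normalize_resolution([1.0, -1.0, 1.0]) is None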
19 changes: 11 additions & 8 deletions src/cryo_et_neuroglancer/write_segmentation.py
@@ -19,6 +19,7 @@ def _create_metadata(
block_size: tuple[int, int, int],
data_size: tuple[int, int, int],
data_directory: str,
resolution: tuple[int, int, int] = (1, 1, 1),
) -> dict[str, Any]:
"""Create the metadata for the segmentation"""
metadata = {
@@ -27,15 +28,14 @@
"num_channels": 1,
"scales": [
{
"chunk_sizes": [list(chunk_size)],
"chunk_sizes": [chunk_size],
"encoding": "compressed_segmentation",
"compressed_segmentation_block_size": list(block_size),
# TODO resolution is in nm, while for others there is no units
"resolution": [1, 1, 1],
"compressed_segmentation_block_size": block_size,
"resolution": resolution,
"key": data_directory,
"size": data_size[
::-1
], # reverse the data size to pass from X-Y-Z to Z-Y-X
], # reverse the data size to pass from Z-Y-X to X-Y-Z
}
],
"type": "segmentation",
@@ -60,12 +60,15 @@ def main(
block_size: tuple[int, int, int] = (64, 64, 64),
data_directory: str = "data",
delete_existing_output_directory: bool = False,
output_path = None
output_path=None,
resolution: tuple[int, int, int] = (1, 1, 1),
) -> None:
"""Convert the given OME-Zarr file to neuroglancer segmentation format with the given block size"""
print(f"Converting {filename} to neuroglancer compressed segmentation format")
dask_data = load_omezarr_data(filename)
output_directory = output_path or filename.parent / f"precomputed-{filename.stem[:-5]}"
output_directory = (
output_path or filename.parent / f"precomputed-{filename.stem[:-5]}"
)
if delete_existing_output_directory and output_directory.exists():
print(
f"The output directory {output_directory!s} exists, deleting before starting the conversion"
@@ -79,7 +82,7 @@
c.write_to_directory(output_directory / data_directory)

metadata = _create_metadata(
dask_data.chunksize, block_size, dask_data.shape, data_directory
dask_data.chunksize, block_size, dask_data.shape, data_directory, resolution
)
write_metadata(metadata, output_directory)
print(f"Wrote segmentation to {output_directory}")
165 changes: 89 additions & 76 deletions src/tests/test_segmentation_encoding.py
@@ -100,95 +100,108 @@ def test__create_encoded_values():
buffer = bytearray() # will start in 0
offset = _create_encoded_values(buffer, np.array([1, 0, 2]), 2)
assert offset == 0
assert buffer == struct.pack('<I', 0b100001)
assert buffer == struct.pack("<I", 0b100001)

buffer = bytearray(8) # will start in 2
offset = _create_encoded_values(buffer, np.array([1, 0, 2]), 2)
assert offset == 2
assert buffer == struct.pack('<QI', 0, 0b100001) # we need to pad manually for the test
assert buffer == struct.pack(
"<QI", 0, 0b100001
) # we need to pad manually for the test


def test__create_file_chunck_header():
buffer = _create_file_chunk_header()
assert buffer == struct.pack('<I', 1)
assert buffer == struct.pack("<I", 1)


def test__create_segmentation_chunk():
# We take a small 8x8 cube
array = [
[[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1]],

[[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1]],

[[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1]],

[[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1]],

[[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1]],

[[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1]],

[[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1]],

[[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1]],
[
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
],
[
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
],
[
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
],
[
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
],
[
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
],
[
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
],
[
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
],
[
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
[0, 0, 0, 0],
[1, 1, 1, 1],
],
]
chunk: Chunk = create_segmentation_chunk(np.array(array), dimensions=((0, 0, 0), (8, 8, 8)), block_size=(8, 8, 8))
chunk: Chunk = create_segmentation_chunk(
np.array(array), dimensions=((0, 0, 0), (8, 8, 8)), block_size=(8, 8, 8)
)

assert chunk.dimensions == ((0, 0, 0), (8, 8, 8))
# TODO expand me!
# TODO expand me!
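The resolution handling itself is only exercised indirectly here; a possible follow-up test, sketched below and not part of this commit, could check the error codes that encode_segmentation returns for bad --resolution values (assuming pytest and the import path cryo_et_neuroglancer.cli):

from cryo_et_neuroglancer.cli import encode_segmentation


def test_encode_segmentation_rejects_bad_resolution(tmp_path):
    # tmp_path exists, so the path check passes and the resolution checks run.
    zarr_dir = tmp_path / "dummy.zarr"
    zarr_dir.mkdir()
    # Two values: neither a single broadcastable value nor a full X Y Z triple.
    assert encode_segmentation(str(zarr_dir), False, "", 64, [1.0, 2.0]) == 2
    # A non-positive component is rejected.
    assert encode_segmentation(str(zarr_dir), False, "", 64, [1.0, -1.0, 1.0]) == 3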
