Skip to content

Commit

Permalink
Setting try blocks to check the Airflow service
Browse files Browse the repository at this point in the history
  • Loading branch information
camilolaiton committed Jun 6, 2024
1 parent 8d9ab63 commit 149381b
Show file tree
Hide file tree
Showing 3 changed files with 27 additions and 18 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -597,7 +597,7 @@ def smartspim_channel_zarr_writer(
image_data = image_data.rechunk(final_chunksize)
image_data = pad_array_n_d(arr=image_data)

print(f"About to write {image_data} in {output_path}")
print(f"About to write {image_data} in {output_path} with stack name {stack_name}")

# Creating Zarr dataset
store = parse_url(path=output_path, mode="w").store
Expand Down Expand Up @@ -710,6 +710,7 @@ def smartspim_channel_zarr_writer(

end_time = time.time()
logger.info(f"Time to write the dataset: {end_time - start_time}")
print(f"Time to write the dataset: {end_time - start_time}")
logger.info(f"Written pyramid: {written_pyramid}")


Expand Down
39 changes: 22 additions & 17 deletions src/aind_smartspim_data_transformation/smartspim_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,6 @@ def _compress_and_write_channels(
read_channel_stacks: Iterator[tuple],
compressor: Blosc,
job_kwargs: dict,
output_format: str = "zarr",
):
"""
Compresses SmartSPIM image data.
Expand All @@ -145,26 +144,32 @@ def _compress_and_write_channels(
processes=True,
)

for delayed_arr, output_path, stack_name in read_channel_stacks:
smartspim_channel_zarr_writer(
image_data=delayed_arr,
output_path=output_path,
voxel_size=[2.0, 1.8, 1.8],
final_chunksize=(128, 128, 128),
scale_factor=[2, 2, 2],
n_lvls=4,
channel_name=output_path.stem,
stack_name=stack_name,
client=client,
logger=logging,
writing_options=compressor,
)

try:
for delayed_arr, output_path, stack_name in read_channel_stacks:
smartspim_channel_zarr_writer(
image_data=delayed_arr,
output_path=output_path,
voxel_size=[2.0, 1.8, 1.8],
final_chunksize=(128, 128, 128),
scale_factor=[2, 2, 2],
n_lvls=4,
channel_name=output_path.stem,
stack_name=stack_name,
client=client,
logger=logging,
writing_options=compressor,
)

except Exception as e:
print(f"Error converting array: {stack_name} {e}")
# Closing client
# with silence_logging_cmgr(logging.CRITICAL):
# client.shutdown()

_cleanup(deployment)
try:
_cleanup(deployment)
except Exception as e:
print(f"Error shutting down client: {e}")

def _compress_raw_data(self) -> None:
"""Compresses smartspim data"""
Expand Down
3 changes: 3 additions & 0 deletions tests/test_smartspim_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,9 @@ def test_failing_compressor(self):
failed_basic_job_settings = failed_basic_job_settings
SmartspimCompressionJob(job_settings=failed_basic_job_settings)

def test_compress_and_write_channels(self):
"""Tests SmartSPIM compression and zarr writing"""
pass

# Run the full unittest suite when this file is executed directly
# (e.g. `python tests/test_smartspim_job.py`).
if __name__ == "__main__":
    unittest.main()

0 comments on commit 149381b

Please sign in to comment.