Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update data format #8

Merged
merged 1 commit into the base branch from the source branch (branch names lost in page extraction) on Nov 23, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 5 additions & 4 deletions create_data.py
Original file line number | Diff line number | Diff line change
Expand Up @@ -21,10 +21,10 @@ def write_metadata(in_xml, out_xml, out_path):
bucket_name=bucket_name,
path_in_bucket=path_in_bucket,
authentication='Anonymous',
bdv_type='bdv.zarr.s3')
bdv_type='ome.zarr.s3')

with z5py.File(out_path, 'r') as f:
shape = f['setup0/timepoint0/s0'].shape[2:]
shape = f['s0'].shape[2:]

# check if we need to update the shape and resolution
exp_shape = get_size(out_xml, setup_id=0)
Expand Down Expand Up @@ -71,7 +71,7 @@ def add_volume(in_path, vol_name, layer_type, start_scale=0):
# convert to ome zarr
convert_bdv_n5(in_path=in_path,
out_path=out_path,
out_key='setup0/timepoint0',
out_key='',
vol_name=vol_name,
use_nested_store=False,
n_threads=8,
Expand Down Expand Up @@ -117,4 +117,5 @@ def add_all_volumes():


if __name__ == '__main__':
add_all_volumes()
add_myosin()
# add_all_volumes()
2 changes: 1 addition & 1 deletion data/prospr-myosin.xml
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
<first>0</first>
<last>0</last>
</Timepoints>
<ImageLoader format="bdv.zarr.s3">
<ImageLoader format="ome.zarr.s3">
<Key>prospr-myosin.ome.zarr</Key>
<SigningRegion>us-west-2</SigningRegion>
<ServiceEndpoint>https://s3.embl.de</ServiceEndpoint>
Expand Down
14 changes: 10 additions & 4 deletions data_conversion/to_ome_zarr.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,17 +136,23 @@ def convert_bdv_n5(in_path, out_path, out_key, vol_name,
if start_scale > 0:
scale_names = scale_names[start_scale:]

g_out = f_out.create_group(out_key)
write_at_root = out_key is None or out_key == ''
if write_at_root:
g_out = f_out
else:
g_out = f_out.create_group(out_key)
out_names = []

for sid, name in enumerate(scale_names):
ds_in = scale_group[name]
out_name = f"s{sid}"

store_path = os.path.join(out_path, out_name) if write_at_root else\
os.path.join(out_path, out_key, out_name)
if use_nested_store:
store = zarr.NestedDirectoryStore(os.path.join(out_path, out_key, out_name))
store = zarr.NestedDirectoryStore(store_path)
else:
store = zarr.DirectoryStore(os.path.join(out_path, out_key, out_name))
store = zarr.DirectoryStore(store_path)
ds_out = zarr.zeros(store=store,
shape=ds_in.shape,
chunks=ds_in.chunks,
Expand All @@ -157,7 +163,7 @@ def convert_bdv_n5(in_path, out_path, out_key, vol_name,

# this invalidates the shape and chunk attributes of our dataset,
# so we can't use it after that (but we also don't need to)
expand_dims(os.path.join(out_path, out_key, out_name), use_nested_store)
expand_dims(store_path, use_nested_store)
out_names.append(out_name)

assert len(out_names) == len(scales)
Expand Down