diff --git a/create_data.py b/create_data.py
index 051e126..571bcce 100644
--- a/create_data.py
+++ b/create_data.py
@@ -21,10 +21,10 @@ def write_metadata(in_xml, out_xml, out_path):
bucket_name=bucket_name,
path_in_bucket=path_in_bucket,
authentication='Anonymous',
- bdv_type='bdv.zarr.s3')
+ bdv_type='ome.zarr.s3')
with z5py.File(out_path, 'r') as f:
- shape = f['setup0/timepoint0/s0'].shape[2:]
+ shape = f['s0'].shape[2:]
# check if we need to update the shape and resolution
exp_shape = get_size(out_xml, setup_id=0)
@@ -71,7 +71,7 @@ def add_volume(in_path, vol_name, layer_type, start_scale=0):
# convert to ome zarr
convert_bdv_n5(in_path=in_path,
out_path=out_path,
- out_key='setup0/timepoint0',
+ out_key='',
vol_name=vol_name,
use_nested_store=False,
n_threads=8,
@@ -117,4 +117,5 @@ def add_all_volumes():
if __name__ == '__main__':
- add_all_volumes()
+ add_myosin()
+ # add_all_volumes()
diff --git a/data/prospr-myosin.xml b/data/prospr-myosin.xml
index 94a5cd0..e9cd387 100644
--- a/data/prospr-myosin.xml
+++ b/data/prospr-myosin.xml
@@ -25,7 +25,7 @@
0
0
-    <ImageLoader format="bdv.zarr.s3">
+    <ImageLoader format="ome.zarr.s3">
prospr-myosin.ome.zarr
us-west-2
https://s3.embl.de
diff --git a/data_conversion/to_ome_zarr.py b/data_conversion/to_ome_zarr.py
index 9e07117..890ef83 100644
--- a/data_conversion/to_ome_zarr.py
+++ b/data_conversion/to_ome_zarr.py
@@ -136,17 +136,23 @@ def convert_bdv_n5(in_path, out_path, out_key, vol_name,
if start_scale > 0:
scale_names = scale_names[start_scale:]
- g_out = f_out.create_group(out_key)
+ write_at_root = out_key is None or out_key == ''
+ if write_at_root:
+ g_out = f_out
+ else:
+ g_out = f_out.create_group(out_key)
out_names = []
for sid, name in enumerate(scale_names):
ds_in = scale_group[name]
out_name = f"s{sid}"
+ store_path = os.path.join(out_path, out_name) if write_at_root else\
+ os.path.join(out_path, out_key, out_name)
if use_nested_store:
- store = zarr.NestedDirectoryStore(os.path.join(out_path, out_key, out_name))
+ store = zarr.NestedDirectoryStore(store_path)
else:
- store = zarr.DirectoryStore(os.path.join(out_path, out_key, out_name))
+ store = zarr.DirectoryStore(store_path)
ds_out = zarr.zeros(store=store,
shape=ds_in.shape,
chunks=ds_in.chunks,
@@ -157,7 +163,7 @@ def convert_bdv_n5(in_path, out_path, out_key, vol_name,
# this invalidates the shape and chunk attributes of our dataset,
# so we can't use it after that (but we also don't need to)
- expand_dims(os.path.join(out_path, out_key, out_name), use_nested_store)
+ expand_dims(store_path, use_nested_store)
out_names.append(out_name)
assert len(out_names) == len(scales)