Skip to content

Commit

Permalink
h5ad without X
Browse files Browse the repository at this point in the history
  • Loading branch information
colganwi committed Dec 2, 2024
1 parent 1114645 commit 760a322
Show file tree
Hide file tree
Showing 4 changed files with 21 additions and 2 deletions.
10 changes: 10 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,16 @@ and this project adheres to [Semantic Versioning][].

### Fixed

## [0.1.2] - 2024-12-02

### Added

### Changed

### Fixed

- Fixed `KeyError: "Unable to synchronously open object (object 'X' doesn't exist)"` when reading h5ad without X field (#40)

## [0.1.1] - 2024-11-25

### Added
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ requires = ["hatchling"]

[project]
name = "treedata"
version = "0.1.1"
version = "0.1.2"
description = "anndata with trees"
readme = "README.md"
requires-python = ">=3.10"
Expand Down
3 changes: 2 additions & 1 deletion src/treedata/_core/read.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,8 @@ def _read_tdata(f, filename, backed) -> dict:
backed = "r"
# Read X if not backed
if not backed:
d["X"] = _read_elem(f["X"])
if "X" in f:
d["X"] = _read_elem(f["X"])
else:
d.update({"filename": filename, "filemode": backed})
# Read standard elements
Expand Down
8 changes: 8 additions & 0 deletions tests/test_readwrite.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,14 @@ def test_read_anndata(X, tmp_path):
assert tdata.obst_keys() == []


def test_read_no_X(X, tmp_path):
    """Regression test for #40: an h5ad written without an X matrix can be
    read back, and the resulting object's `X` is None instead of raising
    `KeyError: "Unable to synchronously open object (object 'X' doesn't exist)"`.
    """
    # NOTE(review): the `X` fixture parameter is unused in this test —
    # presumably kept for signature symmetry with test_read_anndata; confirm or drop.
    # Build a TreeData with obs only (no X), round-trip it through h5ad.
    tdata = td.TreeData(obs=pd.DataFrame(index=["0", "1", "2"]))
    file_path = tmp_path / "test.h5ad"
    tdata.write_h5ad(file_path)
    tdata2 = td.read_h5ad(file_path)
    # The fix makes the reader skip the missing "X" dataset rather than fail.
    assert tdata2.X is None


def test_h5ad_backing(tdata, tree, tmp_path):
tdata_copy = tdata.copy()
assert not tdata.isbacked
Expand Down

0 comments on commit 760a322

Please sign in to comment.