From 762a2c53b33f5c966abd394f93e8a30600b4f931 Mon Sep 17 00:00:00 2001 From: Aaron Lun Date: Tue, 30 Apr 2024 17:10:25 -0700 Subject: [PATCH] Expose internal functions for reading sparse matrices from HDF5. (#86) For dense matrices, the user is responsible for specifying whether the transposition was performed; for sparse matrices, the user specifies the dimension extents and CSC/CSR layouts. This allows creation of a ScranMatrix from HDF5 files that are not in the typical formats (i.e., H5AD/10X). --- js/index.js | 1 + js/initializeSparseMatrix.js | 97 ---------- js/initializeSparseMatrixFromHdf5.js | 171 +++++++++++++++++ src/read_hdf5_matrix.cpp | 190 +++++++++++++++---- tests/initializeSparseMatrixFromHdf5.test.js | 31 ++- 5 files changed, 351 insertions(+), 139 deletions(-) create mode 100644 js/initializeSparseMatrixFromHdf5.js diff --git a/js/index.js b/js/index.js index 0f3afc8e..91b1d824 100644 --- a/js/index.js +++ b/js/index.js @@ -2,6 +2,7 @@ export { initialize, terminate, wasmArraySpace, heapSize, maximumThreads } from export { createUint8WasmArray, createInt32WasmArray, createFloat64WasmArray, free } from "./utils.js"; export * from "./initializeSparseMatrix.js"; +export * from "./initializeSparseMatrixFromHdf5.js"; export * from "./rds.js"; export * from "./file.js"; diff --git a/js/initializeSparseMatrix.js b/js/initializeSparseMatrix.js index d3f4c222..de3898e3 100644 --- a/js/initializeSparseMatrix.js +++ b/js/initializeSparseMatrix.js @@ -216,103 +216,6 @@ export function extractMatrixMarketDimensions(x, { compressed = null } = {}) { return output; } -/** - * Initialize a layered sparse matrix from a HDF5 file. - * - * @param {string} file Path to the HDF5 file. - * For browsers, the file should have been saved to the virtual filesystem. - * @param {string} name Name of the dataset inside the file. - * This can be a HDF5 Dataset for dense matrices or a HDF5 Group for sparse matrices. - * For the latter, both H5AD and 10X-style sparse formats are supported. - * @param {object} [options={}] - Optional parameters. - * @param {boolean} [options.forceInteger=true] - Whether to coerce all elements to integers via truncation. - * @param {boolean} [options.layered=true] - Whether to create a layered sparse matrix, see [**tatami_layered**](https://github.com/tatami-inc/tatami_layered) for more details. - * Only used if the relevant HDF5 dataset contains an integer type and/or `forceInteger = true`. - * Setting to `true` assumes that the matrix contains only non-negative integers. - * @param {?(Array|TypedArray|Int32WasmArray)} [options.subsetRow=null] - Row indices to extract. - * All indices must be non-negative integers less than the number of rows in the sparse matrix. - * @param {?(Array|TypedArray|Int32WasmArray)} [options.subsetColumn=null] - Column indices to extract. - * All indices must be non-negative integers less than the number of columns in the sparse matrix. - * - * @return {ScranMatrix} Matrix containing sparse data. 
- */ -export function initializeSparseMatrixFromHdf5(file, name, { forceInteger = true, layered = true, subsetRow = null, subsetColumn = null } = {}) { - var ids = null; - var output; - let wasm_row, wasm_col; - - try { - let use_row_subset = (subsetRow !== null); - let row_offset = 0, row_length = 0; - if (use_row_subset) { - wasm_row = utils.wasmifyArray(subsetRow, "Int32WasmArray"); - row_offset = wasm_row.offset; - row_length = wasm_row.length; - } - - let use_col_subset = (subsetColumn !== null); - let col_offset = 0, col_length = 0; - if (use_col_subset) { - wasm_col = utils.wasmifyArray(subsetColumn, "Int32WasmArray"); - col_offset = wasm_col.offset; - col_length = wasm_col.length; - } - - output = gc.call( - module => module.read_hdf5_matrix(file, name, forceInteger, layered, use_row_subset, row_offset, row_length, use_col_subset, col_offset, col_length), - ScranMatrix - ); - - } catch(e) { - utils.free(output); - throw e; - } finally { - utils.free(wasm_row); - utils.free(wasm_col); - } - - return output; -} - -/** - * Extract the format and dimensions of a HDF5 matrix. - * - * @param {string} file Path to the HDF5 file. - * For browsers, the file should have been saved to the virtual filesystem. - * @param {string} name Name of the dataset inside the file. - * This can be a HDF5 Dataset for dense matrices or a HDF5 Group for sparse matrices. - * For the latter, both H5AD and 10X-style sparse formats are supported. - * - * @return {object} An object containing: - * - `rows`, the number of rows in the matrix. - * - `columns`, the number of columns. - * - `format`, whether the matrix is dense, CSR or CSC. - * - `integer`, whether the matrix data is stored as integers or doubles. - */ -export function extractHdf5MatrixDetails(file, name) { - let output = {}; - let arr = utils.createInt32WasmArray(5); - try { - wasm.call(module => module.extract_hdf5_matrix_details(file, name, arr.offset)); - - let vals = arr.array(); - if (vals[0] > 0) { - output.format = "dense"; - } else if (vals[1] > 0) { - output.format = "csc"; - } else { - output.format = "csr"; - } - - output.rows = vals[2]; - output.columns = vals[3]; - output.integer = vals[4] > 0; - } finally { - arr.free(); - } - return output; -} - /** * Initialize a dense matrix from a column-major array. * diff --git a/js/initializeSparseMatrixFromHdf5.js b/js/initializeSparseMatrixFromHdf5.js new file mode 100644 index 00000000..243ce324 --- /dev/null +++ b/js/initializeSparseMatrixFromHdf5.js @@ -0,0 +1,171 @@ +import * as gc from "./gc.js"; +import * as wasm from "./wasm.js"; +import * as utils from "./utils.js"; +import { ScranMatrix } from "./ScranMatrix.js"; + +/** + * Initialize a (potentially layered) sparse matrix from a HDF5 file, + * either from a dense array (via {@linkcode initializeSparseMatrixFromHdf5DenseArray}) + * or a group containing a compressed sparse matrix (via {@linkcode initializeSparseMatrixFromHdf5SparseMatrix}). + * + * @param {string} file Path to the HDF5 file. + * For browsers, the file should have been saved to the virtual filesystem. + * @param {string} name Name of the matrix inside the file. + * This can be a HDF5 Dataset for dense matrices or a HDF5 Group for sparse matrices. + * For the latter, both H5AD and 10X-style sparse formats are supported. + * @param {object} [options={}] - Optional parameters. + * @param {boolean} [options.forceInteger=true] - Whether to coerce all elements to integers via truncation. 
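+ * Coercion is a no-op if the dataset already stores integer values.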
+ * @param {boolean} [options.layered=true] - Whether to create a layered sparse matrix, see [**tatami_layered**](https://github.com/tatami-inc/tatami_layered) for more details. + * Only used if the relevant HDF5 dataset contains an integer type and/or `forceInteger = true`. + * Setting to `true` assumes that the matrix contains only non-negative integers. + * @param {?(Array|TypedArray|Int32WasmArray)} [options.subsetRow=null] - Row indices to extract. + * All indices must be non-negative integers less than the number of rows in the sparse matrix. + * @param {?(Array|TypedArray|Int32WasmArray)} [options.subsetColumn=null] - Column indices to extract. + * All indices must be non-negative integers less than the number of columns in the sparse matrix. + * + * @return {ScranMatrix} Matrix containing sparse data. + */ +export function initializeSparseMatrixFromHdf5(file, name, { forceInteger = true, layered = true, subsetRow = null, subsetColumn = null } = {}) { + const details = extractHdf5MatrixDetails(file, name); + if (details.format == "dense") { + // Setting transposed = true as all known dense matrices store the cells in the first dimension and the genes in the last dimension. + return initializeSparseMatrixFromHdf5DenseArray(file, name, { transposed: true, forceInteger, layered, subsetRow, subsetColumn }); + } else { + return initializeSparseMatrixFromHdf5SparseMatrix(file, name, details.rows, details.columns, details.format == "csc", { forceInteger, layered, subsetRow, subsetColumn }); + } +} + +function prepare_hdf5_matrix_subset(subsetRow, subsetColumn, fun) { + var output; + let wasm_row, wasm_col; + + try { + let use_row_subset = (subsetRow !== null); + let row_offset = 0, row_length = 0; + if (use_row_subset) { + wasm_row = utils.wasmifyArray(subsetRow, "Int32WasmArray"); + row_offset = wasm_row.offset; + row_length = wasm_row.length; + } + + let use_col_subset = (subsetColumn !== null); + let col_offset = 0, col_length = 0; + if (use_col_subset) { + wasm_col = utils.wasmifyArray(subsetColumn, "Int32WasmArray"); + col_offset = wasm_col.offset; + col_length = wasm_col.length; + } + + output = fun(use_row_subset, row_offset, row_length, use_col_subset, col_offset, col_length); + + } finally { + utils.free(wasm_row); + utils.free(wasm_col); + } + + return output; +} + +/** + * Initialize a (potentially layered) sparse matrix from a two-dimensional HDF5 dataset. + * + * @param {string} file Path to the HDF5 file. + * For browsers, the file should have been saved to the virtual filesystem. + * @param {string} name Name of the dataset inside the file. + * @param {object} [options={}] - Optional parameters. + * @param {boolean} [options.transposed=false] - Whether the matrix is transposed inside the file, i.e., the last dimension represents the rows. + * @param {boolean} [options.forceInteger=false] - Whether to coerce all elements to integers via truncation. + * @param {boolean} [options.layered=true] - Whether to create a layered sparse matrix, see [**tatami_layered**](https://github.com/tatami-inc/tatami_layered) for more details. + * Only used if the relevant HDF5 dataset contains an integer type and/or `forceInteger = true`. + * Setting to `true` assumes that the matrix contains only non-negative integers. + * @param {?(Array|TypedArray|Int32WasmArray)} [options.subsetRow=null] - Row indices to extract. + * All indices must be non-negative integers less than the number of rows in the sparse matrix. 
+ * @param {?(Array|TypedArray|Int32WasmArray)} [options.subsetColumn=null] - Column indices to extract. + * All indices must be non-negative integers less than the number of columns in the sparse matrix. + * + * @return {ScranMatrix} Matrix containing sparse data. + */ +export function initializeSparseMatrixFromHdf5DenseArray(file, name, { transposed = false, forceInteger = false, layered = true, subsetRow = null, subsetColumn = null } = {}) { + return prepare_hdf5_matrix_subset(subsetRow, subsetColumn, (use_row_subset, row_offset, row_length, use_col_subset, col_offset, col_length) => { + return gc.call( + module => module.read_sparse_matrix_from_hdf5_dense_array( + file, name, transposed, forceInteger, layered, + use_row_subset, row_offset, row_length, use_col_subset, col_offset, col_length + ), + ScranMatrix + ); + }); +} + +/** + * Initialize a (potentially layered) sparse matrix from a HDF5 group containing the usual `data`, `indices`, and `indptr` components of a compressed sparse matrix. + * + * @param {string} file - Path to the HDF5 file. + * For browsers, the file should have been saved to the virtual filesystem. + * @param {string} name - Name of the dataset inside the file. + * @param {number} numberOfRows - Number of rows in the matrix. + * @param {number} numberOfColumns - Number of columns in the matrix. + * @param {boolean} byColumn - Whether the matrix is in the compressed sparse column (CSC) format. + * If false, the format is assumed to be compressed sparse row (CSR) instead. + * @param {object} [options={}] - Optional parameters. + * @param {boolean} [options.forceInteger=false] - Whether to coerce all elements to integers via truncation. + * @param {boolean} [options.layered=true] - Whether to create a layered sparse matrix, see [**tatami_layered**](https://github.com/tatami-inc/tatami_layered) for more details. + * Only used if the relevant HDF5 dataset contains an integer type and/or `forceInteger = true`. + * Setting to `true` assumes that the matrix contains only non-negative integers. + * @param {?(Array|TypedArray|Int32WasmArray)} [options.subsetRow=null] - Row indices to extract. + * All indices must be non-negative integers less than the number of rows in the sparse matrix. + * @param {?(Array|TypedArray|Int32WasmArray)} [options.subsetColumn=null] - Column indices to extract. + * All indices must be non-negative integers less than the number of columns in the sparse matrix. + * + * @return {ScranMatrix} Matrix containing sparse data. + */ +export function initializeSparseMatrixFromHdf5SparseMatrix(file, name, numberOfRows, numberOfColumns, byColumn, { forceInteger = false, layered = true, subsetRow = null, subsetColumn = null } = {}) { + return prepare_hdf5_matrix_subset(subsetRow, subsetColumn, (use_row_subset, row_offset, row_length, use_col_subset, col_offset, col_length) => { + return gc.call( + module => module.read_sparse_matrix_from_hdf5_sparse_matrix( + file, name, numberOfRows, numberOfColumns, byColumn, forceInteger, layered, + use_row_subset, row_offset, row_length, use_col_subset, col_offset, col_length + ), + ScranMatrix + ); + }); +} + +/** + * Extract the format and dimensions of a HDF5 matrix. + * + * @param {string} file Path to the HDF5 file. + * For browsers, the file should have been saved to the virtual filesystem. + * @param {string} name Name of the dataset inside the file. + * This can be a HDF5 Dataset for dense matrices or a HDF5 Group for sparse matrices. 
+ * For the latter, both H5AD and 10X-style sparse formats are supported. + * + * @return {object} An object containing: + * - `rows`, the number of rows in the matrix. + * - `columns`, the number of columns. + * - `format`, whether the matrix is dense, CSR or CSC. + * - `integer`, whether the matrix data is stored as integers or doubles. + */ +export function extractHdf5MatrixDetails(file, name) { + let output = {}; + let arr = utils.createInt32WasmArray(5); + try { + wasm.call(module => module.extract_hdf5_matrix_details(file, name, arr.offset)); + + let vals = arr.array(); + if (vals[0] > 0) { + output.format = "dense"; + } else if (vals[1] > 0) { + output.format = "csc"; + } else { + output.format = "csr"; + } + + output.rows = vals[2]; + output.columns = vals[3]; + output.integer = vals[4] > 0; + } finally { + arr.free(); + } + return output; +} diff --git a/src/read_hdf5_matrix.cpp b/src/read_hdf5_matrix.cpp index 88905c3e..e54d637d 100644 --- a/src/read_hdf5_matrix.cpp +++ b/src/read_hdf5_matrix.cpp @@ -113,13 +113,38 @@ void extract_hdf5_matrix_details(std::string path, std::string name, uintptr_t p } template -NumericMatrix read_hdf5_matrix_internal( - size_t nr, - size_t nc, - bool is_dense, - bool csc, +NumericMatrix apply_post_processing( + std::shared_ptr > mat, + bool layered, + bool row_subset, + uintptr_t row_offset, + int row_length, + bool col_subset, + uintptr_t col_offset, + int col_length) +{ + if (row_subset) { + auto offset_ptr = reinterpret_cast(row_offset); + check_subset_indices(offset_ptr, row_length, mat->nrow()); + auto smat = tatami::make_DelayedSubset<0>(std::move(mat), std::vector(offset_ptr, offset_ptr + row_length)); + mat = std::move(smat); + } + + if (col_subset) { + auto offset_ptr = reinterpret_cast(col_offset); + check_subset_indices(offset_ptr, col_length, mat->ncol()); + auto smat = tatami::make_DelayedSubset<1>(std::move(mat), std::vector(offset_ptr, offset_ptr + col_length)); + mat = std::move(smat); + } + + return sparse_from_tatami(mat.get(), layered); +} + +template +NumericMatrix read_sparse_matrix_from_hdf5_dense_array_internal( const std::string& path, - const std::string name, + const std::string& name, + bool trans, bool layered, bool row_subset, uintptr_t row_offset, @@ -128,47 +153,124 @@ NumericMatrix read_hdf5_matrix_internal( uintptr_t col_offset, int col_length) { - if (!is_dense && !csc && !layered && !row_subset && !col_subset) { - return NumericMatrix(new tatami::CompressedSparseRowMatrix >( - tatami_hdf5::load_hdf5_compressed_sparse_matrix >(nr, nc, path, name + "/data", name + "/indices", name + "/indptr") - )); + std::shared_ptr > mat; - } else { - std::shared_ptr > mat; - try { - if (is_dense) { - mat.reset(new tatami_hdf5::Hdf5DenseMatrix(path, name)); - } else if (csc) { - mat.reset(new tatami_hdf5::Hdf5CompressedSparseMatrix(nr, nc, path, name + "/data", name + "/indices", name + "/indptr")); - } else { - mat.reset(new tatami_hdf5::Hdf5CompressedSparseMatrix(nr, nc, path, name + "/data", name + "/indices", name + "/indptr")); - } + try { + if (trans) { + mat.reset(new tatami_hdf5::Hdf5DenseMatrix(path, name)); + } else { + mat.reset(new tatami_hdf5::Hdf5DenseMatrix(path, name)); + } + } catch (H5::Exception& e) { + throw std::runtime_error(e.getCDetailMsg()); + } + + return apply_post_processing( + std::move(mat), + layered, + row_subset, + row_offset, + row_length, + col_subset, + col_offset, + col_length + ); +} +NumericMatrix read_sparse_matrix_from_hdf5_dense_array( + std::string path, + std::string name, + bool trans, 
+ bool force_integer, + bool layered, + bool row_subset, + uintptr_t row_offset, + int row_length, + bool col_subset, + uintptr_t col_offset, + int col_length) +{ + bool as_integer = force_integer; + if (!force_integer) { + try { + H5::H5File handle(path, H5F_ACC_RDONLY); + auto dhandle = handle.openDataSet(name); + as_integer = dhandle.getTypeClass() == H5T_INTEGER; } catch (H5::Exception& e) { throw std::runtime_error(e.getCDetailMsg()); } + } + + if (as_integer) { + return read_sparse_matrix_from_hdf5_dense_array_internal(path, name, trans, layered, row_subset, row_offset, row_length, col_subset, col_offset, col_length); + } else { + return read_sparse_matrix_from_hdf5_dense_array_internal(path, name, trans, false, row_subset, row_offset, row_length, col_subset, col_offset, col_length); + } +} + +template +NumericMatrix read_sparse_matrix_from_hdf5_sparse_matrix_internal( + const std::string& path, + const std::string& name, + int nr, + int nc, + bool csc, + bool layered, + bool row_subset, + uintptr_t row_offset, + int row_length, + bool col_subset, + uintptr_t col_offset, + int col_length) +{ + if (!layered && !csc && !row_subset && !col_subset) { + std::shared_ptr > mat; - if (row_subset) { - auto offset_ptr = reinterpret_cast(row_offset); - check_subset_indices(offset_ptr, row_length, mat->nrow()); - auto smat = tatami::make_DelayedSubset<0>(std::move(mat), std::vector(offset_ptr, offset_ptr + row_length)); - mat = std::move(smat); + // Don't do the same with CSC matrices; there is an implicit + // expectation that all instances of this function prefer row matrices, + // and if we did it with CSC, we'd get a column-major matrix instead. + try { + mat.reset(new tatami::CompressedSparseRowMatrix >( + tatami_hdf5::load_hdf5_compressed_sparse_matrix >(nr, nc, path, name + "/data", name + "/indices", name + "/indptr") + )); + } catch (H5::Exception& e) { + throw std::runtime_error(e.getCDetailMsg()); } - if (col_subset) { - auto offset_ptr = reinterpret_cast(col_offset); - check_subset_indices(offset_ptr, col_length, mat->ncol()); - auto smat = tatami::make_DelayedSubset<1>(std::move(mat), std::vector(offset_ptr, offset_ptr + col_length)); - mat = std::move(smat); + return NumericMatrix(std::move(mat)); + + } else { + std::shared_ptr > mat; + + try { + if (!csc) { + mat.reset(new tatami_hdf5::Hdf5CompressedSparseMatrix(nr, nc, path, name + "/data", name + "/indices", name + "/indptr")); + } else { + mat.reset(new tatami_hdf5::Hdf5CompressedSparseMatrix(nr, nc, path, name + "/data", name + "/indices", name + "/indptr")); + } + } catch (H5::Exception& e) { + throw std::runtime_error(e.getCDetailMsg()); } - return sparse_from_tatami(mat.get(), layered); + return apply_post_processing( + std::move(mat), + layered, + row_subset, + row_offset, + row_length, + col_subset, + col_offset, + col_length + ); } } -NumericMatrix read_hdf5_matrix( +NumericMatrix read_sparse_matrix_from_hdf5_sparse_matrix( std::string path, std::string name, + int nr, + int nc, + bool csc, bool force_integer, bool layered, bool row_subset, @@ -178,20 +280,26 @@ NumericMatrix read_hdf5_matrix( uintptr_t col_offset, int col_length) { - auto details = extract_hdf5_matrix_details_internal(path, name); - const auto& is_dense = details.is_dense; - const auto& csc = details.csc; - const auto& nr = details.nr; - const auto& nc = details.nc; + bool as_integer = force_integer; + if (!force_integer) { + try { + H5::H5File handle(path, H5F_ACC_RDONLY); + auto dhandle = handle.openDataSet(name + "/data"); + as_integer = 
dhandle.getTypeClass() == H5T_INTEGER; + } catch (H5::Exception& e) { + throw std::runtime_error(e.getCDetailMsg()); + } + } - if (force_integer || details.is_integer) { - return read_hdf5_matrix_internal(nr, nc, is_dense, csc, path, name, layered, row_subset, row_offset, row_length, col_subset, col_offset, col_length); + if (as_integer) { + return read_sparse_matrix_from_hdf5_sparse_matrix_internal(path, name, nr, nc, csc, layered, row_subset, row_offset, row_length, col_subset, col_offset, col_length); } else { - return read_hdf5_matrix_internal(nr, nc, is_dense, csc, path, name, false, row_subset, row_offset, row_length, col_subset, col_offset, col_length); + return read_sparse_matrix_from_hdf5_sparse_matrix_internal(path, name, nr, nc, csc, false, row_subset, row_offset, row_length, col_subset, col_offset, col_length); } } EMSCRIPTEN_BINDINGS(read_hdf5_matrix) { - emscripten::function("read_hdf5_matrix", &read_hdf5_matrix); emscripten::function("extract_hdf5_matrix_details", &extract_hdf5_matrix_details); + emscripten::function("read_sparse_matrix_from_hdf5_dense_array", &read_sparse_matrix_from_hdf5_dense_array); + emscripten::function("read_sparse_matrix_from_hdf5_sparse_matrix", &read_sparse_matrix_from_hdf5_sparse_matrix); } diff --git a/tests/initializeSparseMatrixFromHdf5.test.js b/tests/initializeSparseMatrixFromHdf5.test.js index 165ae3ba..f07f0f2b 100644 --- a/tests/initializeSparseMatrixFromHdf5.test.js +++ b/tests/initializeSparseMatrixFromHdf5.test.js @@ -60,9 +60,17 @@ test("initialization from HDF5 works correctly with dense inputs", () => { expect(compare.equalArrays(mat2.column(0), first_col)).toBe(true); expect(compare.equalArrays(mat2.column(19), last_col)).toBe(true); + // Checking that it works when untransposed. + var mat3 = scran.initializeSparseMatrixFromHdf5DenseArray(path, "stuff", { transposed: false }); + expect(mat3.numberOfRows()).toBe(mat2.numberOfColumns()); + expect(mat3.numberOfColumns()).toBe(mat2.numberOfRows()); + expect(compare.equalArrays(mat3.column(0), mat2.row(0))).toBe(true); + expect(compare.equalArrays(mat3.row(0), mat2.column(0))).toBe(true); + // Freeing. mat.free(); mat2.free(); + mat3.free(); }) test("dense initialization from HDF5 works correctly with forced integers", () => { @@ -155,6 +163,10 @@ test("initialization from HDF5 works correctly with 10X inputs", () => { // Integer status is automatically detected, allowing the layering to be attempted. var mat2 = scran.initializeSparseMatrixFromHdf5(path, "foobar", { forceInteger: false }); + expect(mat2.numberOfRows()).toBe(mat.numberOfRows()); + expect(mat2.numberOfColumns()).toBe(mat.numberOfColumns()); + expect(compare.equalArrays(mat2.row(0), mat.row(0))).toBe(true); + expect(compare.equalArrays(mat2.column(0), mat.column(0))).toBe(true); // Freeing. mat.free(); @@ -205,11 +217,20 @@ test("initialization from HDF5 works correctly with H5AD inputs", () => { var mat2 = scran.initializeSparseMatrixFromHdf5(path, "layers/counts", { layered: false }); expect(mat2.numberOfRows()).toBe(nr); expect(mat2.numberOfColumns()).toBe(nc); - expect(compare.equalArrays(mat2.row(0), ref)).toBe(true); + expect(compare.equalArrays(mat2.row(0), mat.row(0))).toBe(true); + expect(compare.equalArrays(mat2.column(0), mat.column(0))).toBe(true); + + // Using raw access. 
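+    // i.e., calling the low-level reader directly with explicit dimensions and layout (declared as CSR here via byColumn = false).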
+ var mat3 = scran.initializeSparseMatrixFromHdf5SparseMatrix(path, "layers/counts", nr, nc, false); + expect(mat3.numberOfRows()).toBe(nr); + expect(mat3.numberOfColumns()).toBe(nc); + expect(compare.equalArrays(mat3.row(0), mat.row(0))).toBe(true); + expect(compare.equalArrays(mat3.column(0), mat.column(0))).toBe(true); // Freeing. mat.free(); mat2.free(); + mat3.free(); }) test("initialization from HDF5 works correctly with forced integers", () => { @@ -253,9 +274,17 @@ test("initialization from HDF5 works correctly with forced integers", () => { } } + // Using raw access. + var mat3 = scran.initializeSparseMatrixFromHdf5SparseMatrix(path, "foobar", nr, nc, true, { forceInteger: true, layered: true }); + expect(mat3.numberOfRows()).toBe(nr); + expect(mat3.numberOfColumns()).toBe(nc); + expect(compare.equalArrays(mat3.row(0), mat1.row(0))).toBe(true); + expect(compare.equalArrays(mat3.column(0), mat1.column(0))).toBe(true); + // Freeing. mat1.free(); mat2.free(); + mat3.free(); }) test("initialization from HDF5 works correctly with subsetting", () => {