
Commit

Added a columnMajor= option for dense array initialization.
This makes the special ScranMatrix constructor redundant.
LTLA committed Oct 12, 2024
1 parent 958e9b3 commit 02379c8
Showing 6 changed files with 24 additions and 64 deletions.
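
For orientation, a minimal migration sketch (assuming the usual scran.js module import and the initialize()/terminate() lifecycle from the package's test setup, neither of which appears in this diff): the flag that previously lived on the dedicated constructor is now passed to the generic initializer, which also accepts ordinary Arrays and TypedArrays rather than requiring a Float64WasmArray.

    import * as scran from "scran.js";
    await scran.initialize();

    // Before: scran.ScranMatrix.createDenseMatrix(3, 2, contents, { columnMajor: true }),
    // where `contents` had to be a Float64WasmArray.
    // After: the same flag goes through initializeDenseMatrixFromDenseArray().
    const values = [1, 2, 3, 4, 5, 6]; // 3 rows x 2 columns, column-major.
    const mat = scran.initializeDenseMatrixFromDenseArray(3, 2, values, { columnMajor: true });
    console.log(mat.numberOfRows(), mat.numberOfColumns()); // 3 2
    mat.free();

    await scran.terminate();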
24 changes: 0 additions & 24 deletions js/ScranMatrix.js
@@ -16,30 +16,6 @@ export class ScranMatrix {
return;
}

/**
* Create a dense matrix from an existing Wasm-allocated buffer.
*
* @param {number} rows - Number of rows.
* @param {number} columns - Number of columns.
* @param {Float64WasmArray} contents - Array of matrix contents.
* @param {object} [options={}] - Optional parameters.
* @param {boolean} [options.columnMajor=true] - Whether the array in `contents` is column-major.
* @param {boolean} [options.copy=true] - Whether to copy `contents` when constructing the {@linkplain ScranMatrix}.
* If `false`, the returned {@linkplain ScranMatrix} will refer to the same allocation as `contents`,
* so callers should make sure that it does not outlive `contents`.
*
* @return {ScranMatrix} A {@linkplain ScranMatrix} containing the matrix contents.
*/
static createDenseMatrix(rows, columns, contents, { columnMajor = true , copy = true } = {}) {
if (!(contents instanceof wa.Float64WasmArray)) {
throw new Error("'contents' should be a Float64WasmArray");
}
if (contents.length != rows * columns) {
throw new Error("length of 'contents' should equal the product of 'rows' and 'columns'");
}
return gc.call(module => new module.NumericMatrix(rows, columns, contents.offset, columnMajor, copy), ScranMatrix);
}

/**
* @return {ScranMatrix} A clone of the current ScranMatrix instance.
* This can be freed independently of the current instance.
18 changes: 12 additions & 6 deletions js/initializeSparseMatrixFromArrays.js
@@ -4,18 +4,20 @@ import * as utils from "./utils.js";
import { ScranMatrix } from "./ScranMatrix.js";

/**
* Initialize a dense matrix from a dense array in column-major format.
* Initialize a dense matrix from a dense array.
*
* @param {number} numberOfRows Number of rows in the matrix.
* @param {number} numberOfColumns Number of columns in the matrix.
* @param {WasmArray|Array|TypedArray} values Values of all elements in the matrix, stored in column-major order.
* @param {WasmArray|Array|TypedArray} values Values of all elements in the matrix.
* This is generally expected to contain non-negative integers; otherwise, users should set `forceInteger = false`.
* @param {object} [options={}] - Optional parameters.
* @param {boolean} [options.columnMajor=true] - Whether `values` contains the matrix in a column-major order.
* @param {boolean} [options.forceInteger=false] - Whether to coerce `values` to integers via truncation.
*
* @return {ScranMatrix} Matrix containing dense data.
*/
export function initializeDenseMatrixFromDenseArray(numberOfRows, numberOfColumns, values, options = {}) {
const { forceInteger = false, ...others } = options;
const { columnMajor = true, forceInteger = false, ...others } = options;
utils.checkOtherOptions(others);

var val_data;
@@ -33,6 +35,7 @@ export function initializeDenseMatrixFromDenseArray(numberOfRows, numberOfColumn
numberOfColumns,
val_data.offset,
val_data.constructor.className.replace("Wasm", ""),
columnMajor,
forceInteger
),
ScranMatrix
@@ -54,9 +57,11 @@ export function initializeDenseMatrixFromDenseArray(numberOfRows, numberOfColumn
*
* @param {number} numberOfRows Number of rows in the matrix.
* @param {number} numberOfColumns Number of columns in the matrix.
* @param {WasmArray|Array|TypedArray} values Values of all elements in the matrix, stored in column-major order.
* @param {WasmArray|Array|TypedArray} values Values of all elements in the matrix.
* This is generally expected to contain non-negative integers; otherwise, users should set `forceInteger = false`.
* @param {boolean} columnMajor - Whether `values` contains the matrix in a column-major order.
* @param {object} [options={}] - Optional parameters.
* @param {boolean} [options.columnMajor=true] - Whether `values` contains the matrix in a column-major order.
* @param {boolean} [options.forceInteger=true] - Whether to coerce `values` to integers via truncation.
* @param {boolean} [options.layered=true] - Whether to create a layered sparse matrix, see [**tatami_layered**](https://github.com/tatami-inc/tatami_layered) for more details.
* Only used if `values` contains an integer type and/or `forceInteger = true`.
@@ -65,7 +70,7 @@ export function initializeDenseMatrixFromDenseArray(numberOfRows, numberOfColumn
* @return {ScranMatrix} Matrix containing sparse data.
*/
export function initializeSparseMatrixFromDenseArray(numberOfRows, numberOfColumns, values, options = {}) {
const { forceInteger = true, layered = true, ...others } = options;
const { columnMajor = true, forceInteger = true, layered = true, ...others } = options;
utils.checkOtherOptions(others);

var val_data;
@@ -83,6 +88,7 @@ export function initializeSparseMatrixFromDenseArray(numberOfRows, numberOfColum
numberOfColumns,
val_data.offset,
val_data.constructor.className.replace("Wasm", ""),
columnMajor,
forceInteger,
layered
),
@@ -112,7 +118,7 @@ export function initializeSparseMatrixFromDenseArray(numberOfRows, numberOfColum
* @param {WasmArray} pointers Pointers specifying the start of each column in `indices`.
* This should have length equal to `numberOfColumns + 1`.
* @param {object} [options={}] - Optional parameters.
* @param {boolean} [options.byRow=true] - Whether the input arrays are supplied in the compressed sparse column format.
* @param {boolean} [options.byRow=true] - Whether the input arrays are supplied in the compressed sparse row format.
* If `true`, `indices` should contain column indices and `pointers` should specify the start of each row in `indices`.
* @param {boolean} [options.forceInteger=true] - Whether to coerce `values` to integers via truncation.
* @param {boolean} [options.layered=true] - Whether to create a layered sparse matrix, see [**tatami_layered**](https://github.com/tatami-inc/tatami_layered) for more details.
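
To make the new option above concrete, here is a sketch under the same assumptions as the earlier example (scran.js import plus initialize()/terminate()); the column()/row() accessors used for the check are assumed from the ScranMatrix wrapper. With columnMajor: false, element (r, c) is read from index r * numberOfColumns + c instead of the default c * numberOfRows + r, for both the dense and sparse initializers.

    import * as scran from "scran.js";
    await scran.initialize();

    // The matrix [[1, 2, 3], [4, 5, 6]] supplied in both layouts.
    const colmajor = [1, 4, 2, 5, 3, 6]; // (r, c) at index c * 2 + r.
    const rowmajor = [1, 2, 3, 4, 5, 6]; // (r, c) at index r * 3 + c.

    const dcol = scran.initializeDenseMatrixFromDenseArray(2, 3, colmajor); // columnMajor defaults to true.
    const drow = scran.initializeDenseMatrixFromDenseArray(2, 3, rowmajor, { columnMajor: false });
    console.log(dcol.column(1), drow.column(1)); // both contain [2, 5].

    // The sparse initializer accepts the same flag; the values here are
    // non-negative integers, so the forceInteger/layered defaults are fine.
    const srow = scran.initializeSparseMatrixFromDenseArray(2, 3, rowmajor, { columnMajor: false });
    console.log(srow.row(0)); // contains [1, 2, 3].

    for (const m of [dcol, drow, srow]) {
        m.free();
    }
    await scran.terminate();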
20 changes: 0 additions & 20 deletions src/NumericMatrix.cpp
@@ -7,31 +7,12 @@ NumericMatrix::NumericMatrix(const tatami::NumericMatrix* p) : ptr(p) {}

NumericMatrix::NumericMatrix(std::shared_ptr<const tatami::NumericMatrix> p) : ptr(std::move(p)) {}

template<class Vector_>
tatami::NumericMatrix* create_NumericMatrix(int32_t nr, int32_t nc, Vector_ vec, bool colmajor) {
if (colmajor) {
return new tatami::DenseColumnMatrix<double, int32_t, Vector_>(nr, nc, std::move(vec));
} else {
return new tatami::DenseRowMatrix<double, int32_t, Vector_>(nr, nc, std::move(vec));
}
}

void NumericMatrix::reset_ptr(std::shared_ptr<const tatami::NumericMatrix> p) {
ptr = std::move(p);
by_row.reset();
by_column.reset();
}

NumericMatrix::NumericMatrix(int32_t nr, int32_t nc, uintptr_t values, bool colmajor, bool copy) {
size_t product = static_cast<size_t>(nr) * static_cast<size_t>(nc);
auto iptr = reinterpret_cast<const double*>(values);
if (!copy) {
reset_ptr(std::shared_ptr<const tatami::NumericMatrix>(create_NumericMatrix(nr, nc, tatami::ArrayView<double>(iptr, product), colmajor)));
} else {
reset_ptr(std::shared_ptr<const tatami::NumericMatrix>(create_NumericMatrix(nr, nc, std::vector<double>(iptr, iptr + product), colmajor)));
}
}

int32_t NumericMatrix::nrow() const {
return ptr->nrow();
}
@@ -70,7 +51,6 @@ NumericMatrix NumericMatrix::clone() const {

EMSCRIPTEN_BINDINGS(NumericMatrix) {
emscripten::class_<NumericMatrix>("NumericMatrix")
.constructor<int32_t, int32_t, uintptr_t, bool, bool>(emscripten::return_value_policy::take_ownership())
.function("nrow", &NumericMatrix::nrow, emscripten::return_value_policy::take_ownership())
.function("ncol", &NumericMatrix::ncol, emscripten::return_value_policy::take_ownership())
.function("row", &NumericMatrix::row, emscripten::return_value_policy::take_ownership())
2 changes: 0 additions & 2 deletions src/NumericMatrix.h
@@ -12,8 +12,6 @@ struct NumericMatrix {

NumericMatrix(std::shared_ptr<const tatami::NumericMatrix> p);

NumericMatrix(int32_t nr, int32_t nc, uintptr_t values, bool, bool);

public:
int32_t nrow() const;

20 changes: 10 additions & 10 deletions src/initialize_from_arrays.cpp
@@ -97,34 +97,34 @@ NumericMatrix initialize_from_sparse_arrays(size_t nrows, size_t ncols, size_t n
/**********************************/

template<typename T>
NumericMatrix initialize_sparse_matrix_from_dense_vector_internal(size_t nrows, size_t ncols, uintptr_t values, const std::string& type, bool layered) {
NumericMatrix initialize_sparse_matrix_from_dense_vector_internal(size_t nrows, size_t ncols, uintptr_t values, const std::string& type, bool column_major, bool layered) {
auto vals = create_SomeNumericArray<T>(values, nrows*ncols, type);
tatami::DenseColumnMatrix<T, int32_t, decltype(vals)> mat(nrows, ncols, vals);
tatami::DenseMatrix<T, int32_t, decltype(vals)> mat(nrows, ncols, vals, !column_major);
return sparse_from_tatami(mat, layered);
}

NumericMatrix initialize_sparse_matrix_from_dense_array(size_t nrows, size_t ncols, uintptr_t values, std::string type, bool force_integer, bool sparse, bool layered) {
NumericMatrix initialize_sparse_matrix_from_dense_array(size_t nrows, size_t ncols, uintptr_t values, std::string type, bool column_major, bool force_integer, bool layered) {
if (force_integer || is_type_integer(type)) {
return initialize_sparse_matrix_from_dense_vector_internal<int32_t>(nrows, ncols, values, type, layered);
return initialize_sparse_matrix_from_dense_vector_internal<int32_t>(nrows, ncols, values, type, column_major, layered);
} else {
return initialize_sparse_matrix_from_dense_vector_internal<double>(nrows, ncols, values, type, false);
return initialize_sparse_matrix_from_dense_vector_internal<double>(nrows, ncols, values, type, column_major, false);
}
}

template<typename T>
NumericMatrix initialize_dense_matrix_internal(size_t nrows, size_t ncols, uintptr_t values, const std::string& type) {
NumericMatrix initialize_dense_matrix_internal(size_t nrows, size_t ncols, uintptr_t values, const std::string& type, bool column_major) {
std::vector<T> tmp(nrows* ncols);
auto vals = create_SomeNumericArray<T>(values, nrows*ncols, type);
std::copy(vals.begin(), vals.end(), tmp.begin());
auto ptr = std::shared_ptr<const tatami::NumericMatrix>(new tatami::DenseColumnMatrix<double, int32_t, decltype(tmp)>(nrows, ncols, std::move(tmp)));
auto ptr = std::shared_ptr<const tatami::NumericMatrix>(new tatami::DenseMatrix<double, int32_t, decltype(tmp)>(nrows, ncols, std::move(tmp), !column_major));
return NumericMatrix(std::move(ptr));
}

NumericMatrix initialize_dense_matrix_from_dense_array(size_t nrows, size_t ncols, uintptr_t values, std::string type, bool force_integer) {
NumericMatrix initialize_dense_matrix_from_dense_array(size_t nrows, size_t ncols, uintptr_t values, std::string type, bool column_major, bool force_integer) {
if (force_integer || is_type_integer(type)) {
return initialize_dense_matrix_internal<int32_t>(nrows, ncols, values, type);
return initialize_dense_matrix_internal<int32_t>(nrows, ncols, values, type, column_major);
} else {
return initialize_dense_matrix_internal<double>(nrows, ncols, values, type);
return initialize_dense_matrix_internal<double>(nrows, ncols, values, type, column_major);
}
}

4 changes: 2 additions & 2 deletions tests/aggregateAcrossCells.test.js
@@ -22,13 +22,13 @@ test("aggregation works as expected", () => {
expect(res.numberOfGenes()).toBe(ngenes);

let payload = res.allSums({ asTypedArray: false });
let agmat = scran.ScranMatrix.createDenseMatrix(res.numberOfGenes(), res.numberOfGroups(), payload);
let agmat = scran.initializeDenseMatrixFromDenseArray(res.numberOfGenes(), res.numberOfGroups(), payload);
expect(agmat.numberOfColumns()).toBe(3);
expect(agmat.numberOfRows()).toBe(ngenes);
payload.free();

payload = res.allDetected({ asTypedArray: false });
let dagmat = scran.ScranMatrix.createDenseMatrix(res.numberOfGenes(), res.numberOfGroups(), payload);
let dagmat = scran.initializeDenseMatrixFromDenseArray(res.numberOfGenes(), res.numberOfGroups(), payload);
expect(dagmat.numberOfColumns()).toBe(3);
expect(dagmat.numberOfRows()).toBe(ngenes);
payload.free();
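
One behavioural note on the test change above: the removed createDenseMatrix() had a copy= option to avoid duplicating the Wasm buffer, whereas initializeDenseMatrixFromDenseArray() always copies its input (see initialize_dense_matrix_internal() earlier in this commit), so the source buffer can be freed immediately after construction, as the updated test does with payload. A condensed sketch of that pattern, assuming the createFloat64WasmArray() helper and the WasmArray set()/free() methods from scran.js's usual tooling (not part of this diff):

    import * as scran from "scran.js";
    await scran.initialize();

    // Allocate a Wasm buffer, fill it, wrap it as a dense matrix, then release it.
    const buffer = scran.createFloat64WasmArray(6);
    buffer.set([1, 4, 2, 5, 3, 6]); // 2 x 3, column-major by default.
    const mat = scran.initializeDenseMatrixFromDenseArray(2, 3, buffer);
    buffer.free(); // safe: the initializer copied the contents into the matrix.

    console.log(mat.numberOfRows(), mat.numberOfColumns()); // 2 3
    mat.free();
    await scran.terminate();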
