
maint: depend on stable moclib-rust and cdshealpix-rust. Also apply modifications for into_raw_vec deprecated in ndarray
ManonMarchand committed Oct 17, 2024
1 parent 0f19457 commit c8f6fc4
Showing 2 changed files with 24 additions and 24 deletions.
8 changes: 4 additions & 4 deletions Cargo.toml
@@ -28,9 +28,9 @@ bench = true
 crate-type = ["cdylib"]
 
 [dependencies]
-# moc = { version = "0.15", features = ["storage"] }
-moc = { git = 'https://github.com/cds-astro/cds-moc-rust', rev = '361eb278fe782bfc053433495c33e3f16e20cdbd', features = ["storage"] }
-healpix = { package = "cdshealpix", version = "0.6" }
+moc = { version = "0.17", features = ["storage"] }
+#moc = { git = 'https://github.com/cds-astro/cds-moc-rust', rev = '361eb278fe782bfc053433495c33e3f16e20cdbd', features = ["storage"] }
+healpix = { package = "cdshealpix", version = "0.7" }
 # healpix = { package = "cdshealpix", git = 'https://github.com/cds-astro/cds-healpix-rust', branch = 'master' }
 rayon = "1.10"
 num_threads = "0.1"
@@ -39,7 +39,7 @@ num_threads = "0.1"
 version = "0.22"
 
 [dependencies.ndarray]
-version = "0.15"
+version = "0.16"
 default-features = false # do not include the default features, and optionally
 # cherry-pick individual features
 features = ["rayon"]
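The ndarray bump from 0.15 to 0.16 is what triggers the src/lib.rs changes below: 0.16 deprecates into_raw_vec because the backing Vec of an array is not guaranteed to start at the array's first logical element. A minimal sketch of the upstream replacement, assuming ndarray 0.16's into_raw_vec_and_offset; this commit instead routes around the deprecation via rust-numpy's to_vec, as shown in the hunks below:

use ndarray::Array1;

fn main() {
    let a = Array1::from(vec![1.0_f64, 2.0, 3.0]);

    // ndarray 0.15 allowed: let v = a.into_raw_vec();
    // ndarray 0.16 deprecates it in favour of a variant that also returns
    // the offset of the first logical element within the backing Vec
    // (None when the array is empty):
    let (v, offset) = a.into_raw_vec_and_offset();
    assert_eq!(v, vec![1.0, 2.0, 3.0]);
    assert_eq!(offset, Some(0));
}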
40 changes: 20 additions & 20 deletions src/lib.rs
@@ -1139,9 +1139,9 @@ fn mocpy(m: &Bound<'_, PyModule>) -> PyResult<()> {
         lat: PyReadonlyArrayDyn<f64>,
         d2: u8,
     ) -> PyResult<usize> {
-        let times = times.as_array().to_owned().into_raw_vec();
-        let lon = lon.as_array().to_owned().into_raw_vec();
-        let lat = lat.as_array().to_owned().into_raw_vec();
+        let times = times.to_vec().map_err(PyValueError::new_err)?;
+        let lon = lon.to_vec().map_err(PyValueError::new_err)?;
+        let lat = lat.to_vec().map_err(PyValueError::new_err)?;
 
         U64MocStore::get_global_store()
             .create_from_times_positions_approx(times, lon, lat, d1, d2)
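All twenty replacements in this file follow one pattern: the old code copied the NumPy buffer into an owned ndarray and then unwrapped its backing Vec with the now-deprecated into_raw_vec; the new code asks rust-numpy's to_vec for the Vec directly. One nuance: to_vec errors on non-contiguous input (which the old copy-then-unwrap path would have handled), which is why each call maps the error to a Python ValueError. A self-contained sketch of the pattern; the function sum_f64 is illustrative, not part of mocpy:

use numpy::PyReadonlyArrayDyn;
use pyo3::exceptions::PyValueError;
use pyo3::prelude::*;

#[pyfunction]
fn sum_f64(arr: PyReadonlyArrayDyn<f64>) -> PyResult<f64> {
    // Before: arr.as_array().to_owned().into_raw_vec()
    //   to_owned() copies the data, and into_raw_vec() is deprecated in
    //   ndarray 0.16 because the Vec may not start at element 0.
    // After: one call that copies into a Vec and reports non-contiguous
    //   input as a Python ValueError.
    let v = arr.to_vec().map_err(PyValueError::new_err)?;
    Ok(v.iter().sum())
}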
@@ -1177,9 +1177,9 @@ fn mocpy(m: &Bound<'_, PyModule>) -> PyResult<()> {
         lat: PyReadonlyArrayDyn<f64>,
         d2: u8,
     ) -> PyResult<usize> {
-        let times = times.as_array().to_owned().into_raw_vec();
-        let lon = lon.as_array().to_owned().into_raw_vec();
-        let lat = lat.as_array().to_owned().into_raw_vec();
+        let times = times.to_vec().map_err(PyValueError::new_err)?;
+        let lon = lon.to_vec().map_err(PyValueError::new_err)?;
+        let lat = lat.to_vec().map_err(PyValueError::new_err)?;
 
         U64MocStore::get_global_store()
             .create_from_times_positions(times, lon, lat, d1, d2)
@@ -1224,10 +1224,10 @@ fn mocpy(m: &Bound<'_, PyModule>) -> PyResult<()> {
         lat: PyReadonlyArrayDyn<f64>,
         d2: u8,
     ) -> PyResult<usize> {
-        let times_min = times_min.as_array().to_owned().into_raw_vec();
-        let times_max = times_max.as_array().to_owned().into_raw_vec();
-        let lon = lon.as_array().to_owned().into_raw_vec();
-        let lat = lat.as_array().to_owned().into_raw_vec();
+        let times_min = times_min.to_vec().map_err(PyValueError::new_err)?;
+        let times_max = times_max.to_vec().map_err(PyValueError::new_err)?;
+        let lon = lon.to_vec().map_err(PyValueError::new_err)?;
+        let lat = lat.to_vec().map_err(PyValueError::new_err)?;
 
         U64MocStore::get_global_store()
             .create_from_time_ranges_positions_approx(times_min, times_max, d1, lon, lat, d2)
@@ -1266,10 +1266,10 @@ fn mocpy(m: &Bound<'_, PyModule>) -> PyResult<()> {
         lat: PyReadonlyArrayDyn<f64>,
         d2: u8,
     ) -> PyResult<usize> {
-        let times_min = times_min.as_array().to_owned().into_raw_vec();
-        let times_max = times_max.as_array().to_owned().into_raw_vec();
-        let lon = lon.as_array().to_owned().into_raw_vec();
-        let lat = lat.as_array().to_owned().into_raw_vec();
+        let times_min = times_min.to_vec().map_err(PyValueError::new_err)?;
+        let times_max = times_max.to_vec().map_err(PyValueError::new_err)?;
+        let lon = lon.to_vec().map_err(PyValueError::new_err)?;
+        let lat = lat.to_vec().map_err(PyValueError::new_err)?;
 
         U64MocStore::get_global_store()
             .create_from_time_ranges_positions(times_min, times_max, d1, lon, lat, d2)
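The remaining two hunks pair this conversion with a size check between related arrays. A hypothetical helper, not part of this commit, that factors out that convert-then-validate pattern:

use numpy::{Element, PyReadonlyArrayDyn};
use pyo3::exceptions::PyValueError;
use pyo3::prelude::*;

// Hypothetical refactor sketch; to_paired_vecs does not exist in mocpy.
// It converts two arrays that must have matching lengths, reusing the
// commit's error message wording.
fn to_paired_vecs<T: Element>(
    a: PyReadonlyArrayDyn<T>,
    b: PyReadonlyArrayDyn<T>,
    a_name: &str,
    b_name: &str,
) -> PyResult<(Vec<T>, Vec<T>)> {
    let a = a.to_vec().map_err(PyValueError::new_err)?;
    let b = b.to_vec().map_err(PyValueError::new_err)?;
    if a.len() != b.len() {
        return Err(PyValueError::new_err(format!(
            "`{a_name}` and `{b_name}` do not have the same size."
        )));
    }
    Ok((a, b))
}

With such a helper, the times_min/times_max pair below would reduce to: let (times_min, times_max) = to_paired_vecs(times_min, times_max, "times_min", "times_max")?;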
@@ -1313,14 +1313,14 @@ fn mocpy(m: &Bound<'_, PyModule>) -> PyResult<()> {
         d1: u8,
         spatial_coverages: PyReadonlyArrayDyn<usize>,
     ) -> PyResult<usize> {
-        let times_min = times_min.as_array().to_owned().into_raw_vec();
-        let times_max = times_max.as_array().to_owned().into_raw_vec();
+        let times_min = times_min.to_vec().map_err(PyValueError::new_err)?;
+        let times_max = times_max.to_vec().map_err(PyValueError::new_err)?;
         if times_min.len() != times_max.len() {
             return Err(PyValueError::new_err(
                 "`times_min` and `times_max` do not have the same size.",
             ));
         }
-        let spatial_coverage_indices = spatial_coverages.as_array().to_owned().into_raw_vec();
+        let spatial_coverage_indices = spatial_coverages.to_vec().map_err(PyValueError::new_err)?;
         if times_min.len() != spatial_coverage_indices.len() {
             return Err(PyValueError::new_err(
                 "`times` and `spatial indices` do not have the same size.",
@@ -1367,14 +1367,14 @@ fn mocpy(m: &Bound<'_, PyModule>) -> PyResult<()> {
         d1: u8,
         spatial_coverages: PyReadonlyArrayDyn<usize>,
     ) -> PyResult<usize> {
-        let times_min = times_min.as_array().to_owned().into_raw_vec();
-        let times_max = times_max.as_array().to_owned().into_raw_vec();
+        let times_min = times_min.to_vec().map_err(PyValueError::new_err)?;
+        let times_max = times_max.to_vec().map_err(PyValueError::new_err)?;
         if times_min.len() != times_max.len() {
             return Err(PyValueError::new_err(
                 "`times_min` and `times_max` do not have the same size.",
             ));
         }
-        let spatial_coverage_indices = spatial_coverages.as_array().to_owned().into_raw_vec();
+        let spatial_coverage_indices = spatial_coverages.to_vec().map_err(PyValueError::new_err)?;
         if times_min.len() != spatial_coverage_indices.len() {
             return Err(PyValueError::new_err(
                 "`times` and `spatial indices` do not have the same size.",
