Merge pull request #14 from HDFGroup/feature/multi_dataset
Sync with canonical
fortnern authored Oct 18, 2022
2 parents a4b5cc0 + eddff72 · commit 3181632
Showing 52 changed files with 3,250 additions and 834 deletions.
24 changes: 23 additions & 1 deletion .github/workflows/main.yml
@@ -269,7 +269,7 @@ jobs:
cmake: "Debug"
autotools: "debug"

- name: "Ubuntu gcc Autotools -Werror (build only)"
- name: "Ubuntu gcc Autotools -Werror (build only) DBG"
os: ubuntu-latest
cpp: enable
fortran: disable
@@ -291,6 +291,28 @@ jobs:
cmake: "Debug"
autotools: "debug"

+ - name: "Ubuntu gcc Autotools -Werror (build only) REL"
+ os: ubuntu-latest
+ cpp: enable
+ fortran: disable
+ java: disable
+ parallel: disable
+ mirror_vfd: enable
+ direct_vfd: enable
+ deprec_sym: enable
+ default_api: v114
+ toolchain: ""
+ generator: "autogen"
+ flags: "CFLAGS=-Werror"
+ run_tests: false
+ thread_safety:
+ - enabled: false
+ text: ""
+ build_mode:
+ - text: "REL"
+ cmake: "Release"
+ autotools: "production"
+
# Sets the job's name from the properties
name: "${{ matrix.name }}${{ matrix.build_mode.text }}${{ matrix.thread_safety.text }}"

1 change: 1 addition & 0 deletions CMakeLists.txt
@@ -1133,6 +1133,7 @@ if (EXISTS "${HDF5_SOURCE_DIR}/fortran" AND IS_DIRECTORY "${HDF5_SOURCE_DIR}/for

# Parallel IO usage requires MPI to be Linked and Included
if (H5_HAVE_PARALLEL)
+ find_package(MPI REQUIRED COMPONENTS Fortran)
set (LINK_Fortran_LIBS ${LINK_Fortran_LIBS} ${MPI_Fortran_LIBRARIES})
if (MPI_Fortran_LINK_FLAGS)
set (CMAKE_Fortran_EXE_LINKER_FLAGS "${MPI_Fortran_LINK_FLAGS} ${CMAKE_EXE_LINKER_FLAGS}")
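The one-line CMake addition above makes the MPI Fortran component an explicit requirement when parallel HDF5 is enabled, so that MPI_Fortran_LIBRARIES is populated before it is used on the following line. For context, the parallel I/O that the surrounding comment refers to looks roughly like the sketch below, written against the HDF5 C API from C++; the file name, communicator choice, and omission of error checking are illustrative assumptions, not code from this repository.

#include <mpi.h>
#include <hdf5.h>

int main(int argc, char **argv)
{
    MPI_Init(&argc, &argv);

    // File-access property list that routes HDF5 I/O through MPI-IO
    hid_t fapl = H5Pcreate(H5P_FILE_ACCESS);
    H5Pset_fapl_mpio(fapl, MPI_COMM_WORLD, MPI_INFO_NULL);

    // Every rank opens the same file collectively ("parallel.h5" is a made-up name)
    hid_t file = H5Fcreate("parallel.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl);

    // ... create dataspaces/datasets and write, typically with a transfer
    // property list set to H5FD_MPIO_COLLECTIVE via H5Pset_dxpl_mpio ...

    H5Fclose(file);
    H5Pclose(fapl);
    MPI_Finalize();
    return 0;
}

Building such a program requires an HDF5 library configured with parallel support, which is the configuration path this CMake branch handles.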
8 changes: 5 additions & 3 deletions c++/test/tfilter.cpp
@@ -190,13 +190,15 @@ test_szip_filter(H5File &file1)
hsize_t i, j, n;
for (i = n = 0; i < size[0]; i++) {
for (j = 0; j < size[1]; j++) {
- points[i][j] = (int)n++;
+ points[i][j] = static_cast<int>(n++);
}
}

// Write to the dataset then read back the values
- dataset.write((void *)points, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);
- dataset.read((void *)check, PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL, xfer);
+ dataset.write(static_cast<void *>(points), PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL,
+ xfer);
+ dataset.read(static_cast<void *>(check), PredType::NATIVE_INT, DataSpace::ALL, DataSpace::ALL,
+ xfer);

// Check that the values read are the same as the values written
for (i = 0; i < size[0]; i++)
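The tfilter.cpp hunk above replaces C-style casts with static_cast without changing behavior. As a standalone illustration of why the named cast is generally preferred (this example is not taken from the HDF5 sources), consider:

#include <cstddef>

int main()
{
    const std::size_t n = 10;
    int               points[10] = {0};
    double            scale      = 2.5;

    // A C-style cast compiles even for conversions that are almost certainly
    // bugs, such as silently reinterpreting an int* as a double*.
    double *suspicious = (double *)points;
    (void)suspicious;

    // static_cast accepts only well-defined conversions, so the intent
    // (here: value truncation from double to int) is visible and checked.
    int truncated = static_cast<int>(scale * static_cast<double>(n));

    // static_cast<double *>(points);  // would be rejected at compile time

    return truncated == 25 ? 0 : 1;
}

The same reasoning applies to the (void *) casts in the write/read calls above, which become static_cast<void *> in the new code.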
(Diffs for the remaining 49 changed files are not shown here.)
