From b58e36d68c80abb2ae1e602e164f8dbe2149a5c5 Mon Sep 17 00:00:00 2001
From: Allen Byrne <50328838+byrnHDF@users.noreply.github.com>
Date: Thu, 3 Oct 2024 14:32:38 -0500
Subject: [PATCH] Upgrade version for new branch (#338)

---
 .github/workflows/ant-app.yml            | 10 ++--
 .github/workflows/ant.yml                | 10 ++--
 .github/workflows/clang-format-check.yml |  2 +-
 .github/workflows/clang-format-fix.yml   |  2 +-
 .github/workflows/daily-build.yml        | 18 +++---
 .github/workflows/publish-branch.yml     | 45 ---------------
 .github/workflows/publish-release.yml    |  2 +-
 .github/workflows/release-files.yml      |  4 +-
 .github/workflows/release.yml            |  6 +-
 .github/workflows/remove-files.yml       |  2 +-
 .github/workflows/tarball.yml            |  2 +-
 VERSION                                  |  2 +-
 docs/Build_HDFView.txt                   |  4 +-
 docs/RELEASE.txt                         |  2 +-
 docs/UsersGuide/RELEASE.txt              |  2 +-
 docs/WorkingDirectory.txt                |  2 +-
 package_files/macosx/Info.plist          |  4 +-
 .../hdf/view/ViewProperties.java         | 52 -----------------
 .../hdf/object/h5/H5CompoundAttr.java    | 56 -------------------
 .../object/H5FileTest.java               | 23 --------
 20 files changed, 37 insertions(+), 213 deletions(-)
 delete mode 100644 .github/workflows/publish-branch.yml

diff --git a/.github/workflows/ant-app.yml b/.github/workflows/ant-app.yml
index 4e205fd4..facab0e0 100644
--- a/.github/workflows/ant-app.yml
+++ b/.github/workflows/ant-app.yml
@@ -1,4 +1,4 @@
-name: hdfview ci runs
+name: hdfview 1_16 app ci runs
 
 # Triggers the workflow on a call from another workflow
 on:
@@ -20,7 +20,7 @@ on:
         description: 'The version name of the binaries'
         type: string
         required: false
-        default: HDFView-99.99.99
+        default: HDFView-3.4.99
       file_base:
         description: "The common base name of the source tarballs"
         required: true
@@ -153,7 +153,7 @@ jobs:
         uses: dsaltares/fetch-gh-release-asset@master
         with:
           repo: 'HDFGroup/hdf5'
-          version: 'tags/snapshot'
+          version: 'tags/snapshot-1.16'
           file: '${{ inputs.name_hdf5 }}-win-vs2022_cl.zip'
 
       - name: Uncompress gh binary (Win)
@@ -370,7 +370,7 @@ jobs:
         uses: dsaltares/fetch-gh-release-asset@master
         with:
           repo: 'HDFGroup/hdf5'
-          version: 'tags/snapshot'
+          version: 'tags/snapshot-1.16'
           file: '${{ inputs.name_hdf5 }}-ubuntu-2204_gcc.tar.gz'
 
       - name: List files for the space (Linux)
@@ -562,7 +562,7 @@ jobs:
         uses: dsaltares/fetch-gh-release-asset@master
         with:
           repo: 'HDFGroup/hdf5'
-          version: 'tags/snapshot'
+          version: 'tags/snapshot-1.16'
           file: '${{ inputs.name_hdf5 }}-macos14_clang.tar.gz'
 
       - name: List files for the space (mac)
diff --git a/.github/workflows/ant.yml b/.github/workflows/ant.yml
index 186fc19f..4fd4ab9d 100644
--- a/.github/workflows/ant.yml
+++ b/.github/workflows/ant.yml
@@ -1,4 +1,4 @@
-name: hdfview ci runs
+name: hdfview 1_16 ci runs
 
 # Triggers the workflow on a call from another workflow
 on:
@@ -20,7 +20,7 @@ on:
         description: 'The version name of the binaries'
         type: string
         required: false
-        default: HDFView-99.99.99
+        default: HDFView-3.4.99
       file_base:
         description: "The common base name of the source tarballs"
         required: true
@@ -153,7 +153,7 @@ jobs:
         uses: dsaltares/fetch-gh-release-asset@master
         with:
           repo: 'HDFGroup/hdf5'
-          version: 'tags/snapshot'
+          version: 'tags/snapshot-1.16'
           file: '${{ inputs.name_hdf5 }}-win-vs2022_cl.zip'
 
       - name: Uncompress gh binary (Win)
@@ -362,7 +362,7 @@ jobs:
         uses: dsaltares/fetch-gh-release-asset@master
         with:
           repo: 'HDFGroup/hdf5'
-          version: 'tags/snapshot'
+          version: 'tags/snapshot-1.16'
           file: '${{ inputs.name_hdf5 }}-ubuntu-2204_gcc.tar.gz'
 
       - name: List files for the space (Linux)
@@ -546,7 +546,7 @@ jobs:
         uses: dsaltares/fetch-gh-release-asset@master
         with:
           repo: 'HDFGroup/hdf5'
-          version: 'tags/snapshot'
+          version: 'tags/snapshot-1.16'
           file: '${{ inputs.name_hdf5 }}-macos14_clang.tar.gz'
 
       - name: List files for the space (mac)
diff --git a/.github/workflows/clang-format-check.yml b/.github/workflows/clang-format-check.yml
index f9afb8c7..973af218 100644
--- a/.github/workflows/clang-format-check.yml
+++ b/.github/workflows/clang-format-check.yml
@@ -1,4 +1,4 @@
-name: clang-format Check
+name: clang-format 1_16 Check
 on:
   pull_request:
 permissions:
diff --git a/.github/workflows/clang-format-fix.yml b/.github/workflows/clang-format-fix.yml
index 23fa12e9..f2227c47 100644
--- a/.github/workflows/clang-format-fix.yml
+++ b/.github/workflows/clang-format-fix.yml
@@ -7,7 +7,7 @@
 #
 # which you will need to set to "Read and write permissions"
 #
-name: clang-format Commit Changes
+name: clang-format 1_16 Commit Changes
 on:
   workflow_dispatch:
   push:
diff --git a/.github/workflows/daily-build.yml b/.github/workflows/daily-build.yml
index cf4ea140..6793eaf2 100644
--- a/.github/workflows/daily-build.yml
+++ b/.github/workflows/daily-build.yml
@@ -1,16 +1,16 @@
-name: hdfview daily build
+name: hdfview 1_16 daily build
 
-# Triggers the workflow on a schedule or on demand
+# Triggered on pull request or on demand
 on:
   workflow_dispatch:
     inputs:
       use_ignore:
         description: 'Ignore has changes check'
         type: string
         required: false
         default: check
-  schedule:
-    - cron: "5 0 * * *"
+  pull_request:
+    branches: [ "hdfview-3-1_16" ]
 
 permissions:
   contents: read
@@ -30,7 +30,7 @@ jobs:
       - name: Get hdfview release base name
         uses: dsaltares/fetch-gh-release-asset@master
         with:
-          version: 'tags/HDFView-99.99.99'
+          version: 'tags/HDFView-3.4.99'
           file: 'last-file.txt'
         continue-on-error: true
 
@@ -55,7 +55,7 @@ jobs:
         uses: dsaltares/fetch-gh-release-asset@master
         with:
           repo: 'HDFGroup/hdf5'
-          version: 'tags/snapshot'
+          version: 'tags/snapshot-1.16'
          file: 'last-file.txt'
 
      - name: Read base-name file
@@ -69,7 +69,7 @@ jobs:
      - name: Read inputs
        id: getinputs
        run: |
-          echo "INPUTS_IGNORE=${{ github.event.inputs.use_ignore }}" >> $GITHUB_OUTPUT
+          echo "INPUTS_IGNORE=${{ inputs.use_ignore }}" >> $GITHUB_OUTPUT
 
      - run: echo "use_ignore is ${{ steps.getinputs.outputs.INPUTS_IGNORE }}."
@@ -132,7 +132,7 @@ jobs:
       file_base: ${{ needs.call-workflow-tarball.outputs.file_base }}
       file_branch: ${{ needs.call-workflow-tarball.outputs.file_branch }}
       file_sha: ${{ needs.call-workflow-tarball.outputs.file_sha }}
-      use_tag: HDFView-99.99.99
+      use_tag: HDFView-3.4.99
       use_environ: snapshots
     if: ${{ ((needs.call-workflow-tarball.outputs.has_changes == 'true') || (needs.get-base-names.outputs.run-ignore == 'ignore')) }}
@@ -143,7 +143,7 @@ jobs:
     uses: ./.github/workflows/remove-files.yml
     with:
       file_base: ${{ needs.get-base-names.outputs.hdfview-name }}
-      use_tag: HDFView-99.99.99
+      use_tag: HDFView-3.4.99
       use_environ: snapshots
     if: ${{ ((needs.call-workflow-tarball.outputs.has_changes == 'true') || (needs.get-base-names.outputs.run-ignore == 'ignore')) && (needs.get-base-names.outputs.hdfview-name != needs.call-workflow-tarball.outputs.file_base) }}
diff --git a/.github/workflows/publish-branch.yml b/.github/workflows/publish-branch.yml
deleted file mode 100644
index b47f9375..00000000
--- a/.github/workflows/publish-branch.yml
+++ /dev/null
@@ -1,45 +0,0 @@
-name: hdfview publish files in HDFVIEW from branch to S3
-
-# Triggers the workflow on demand
-on:
-  workflow_dispatch:
-    inputs:
-      local_dir:
-        description: 'HDFView local directory'
-        type: string
-        required: true
-      target_dir:
-        description: 'HDFView target bucket directory'
-        type: string
-        required: true
-
-permissions:
-  contents: read
-
-jobs:
-  publish-tag:
-    runs-on: ubuntu-latest
-    steps:
-      # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
-      - name: Get Sources
-        uses: actions/checkout@9a9194f87191a7e9055e3e9b95b8cfb13023bb08 # v4.1.7
-        with:
-          fetch-depth: 0
-          ref: '${{ github.head_ref || github.ref_name }}'
-
-      - name: List files for the space
-        run: |
-          ls -l ${{ github.workspace }}
-          ls ${{ github.workspace }}/HDFVIEW
-
-      - name: Setup AWS CLI
-        uses: aws-actions/configure-aws-credentials@v1
-        with:
-          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
-          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
-          aws-region: ${{ secrets.AWS_REGION }}
-
-      - name: Sync dir to S3 bucket
-        run: |
-          aws s3 sync ./HDFVIEW/${{ inputs.local_dir }} s3://${{ secrets.AWS_S3_BUCKET }}/${{ vars.TARGET_PATH }}/${{ inputs.target_dir }}
-
diff --git a/.github/workflows/publish-release.yml b/.github/workflows/publish-release.yml
index 3cd8afe2..dc48ebea 100644
--- a/.github/workflows/publish-release.yml
+++ b/.github/workflows/publish-release.yml
@@ -1,4 +1,4 @@
-name: hdfview publishrelease
+name: hdfview 1.16 publishrelease
 
 # Triggers the workflow on demand
 on:
diff --git a/.github/workflows/release-files.yml b/.github/workflows/release-files.yml
index 0c455cf6..033ae108 100644
--- a/.github/workflows/release-files.yml
+++ b/.github/workflows/release-files.yml
@@ -1,4 +1,4 @@
-name: hdfview release-files
+name: hdfview 1_16 release-files
 
 # Triggers the workflow on a call from another workflow
 on:
@@ -8,7 +8,7 @@ on:
         description: 'Release version tag'
         type: string
         required: false
-        default: HDFView-99.99.99
+        default: HDFView-3.4.99
       use_environ:
         description: 'Environment to locate files'
         type: string
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 2dee8f0d..4e2fe0dd 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -1,4 +1,4 @@
-name: hdfview release build
+name: hdfview 1_16 release build
 
 # Triggers the workflow on demand
 on:
@@ -18,12 +18,12 @@ on:
         description: 'HDF5 Release version tag'
         type: string
         required: false
-        default: snapshot
+        default: snapshot-1.16
       name_tag5:
         description: 'HDF5 Release version base name'
         type: string
         required: false
-        default: snapshot
+        default: hdf5-1.16
 
 permissions:
   contents: read
diff --git a/.github/workflows/remove-files.yml b/.github/workflows/remove-files.yml
index 7ddf2713..e1de668c 100644
--- a/.github/workflows/remove-files.yml
+++ b/.github/workflows/remove-files.yml
@@ -8,7 +8,7 @@ on:
         description: 'Release version tag'
         type: string
         required: false
-        default: HDFView-99.99.99
+        default: HDFView-3.4.99
       use_environ:
         description: 'Environment to locate files'
         type: string
diff --git a/.github/workflows/tarball.yml b/.github/workflows/tarball.yml
index f570b4b5..f1a0b52e 100644
--- a/.github/workflows/tarball.yml
+++ b/.github/workflows/tarball.yml
@@ -1,4 +1,4 @@
-name: hdfview tarball
+name: hdfview 1_16 tarball
 
 # Triggers the workflow on a call from another workflow
 on:
diff --git a/VERSION b/VERSION
index d3355a85..1e2ff889 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-VERSION=HDFView-99.99.99
\ No newline at end of file
+VERSION=HDFView-3.4.99
diff --git a/docs/Build_HDFView.txt b/docs/Build_HDFView.txt
index 78cd50a8..9f35fa3b 100644
--- a/docs/Build_HDFView.txt
+++ b/docs/Build_HDFView.txt
@@ -38,14 +38,14 @@ For example,
     #hdf.lib.dir = ${env.HDFLIBS}/lib
     #hdf5.lib.dir = ${env.HDF5LIBS}/lib
     hdf.lib.dir = //HDF_Group/HDF/4.3.X/lib
-    hdf5.lib.dir = //HDF_Group/HDF5/1.15.X/lib
+    hdf5.lib.dir = //HDF_Group/HDF5/1.16.X/lib
 
 From the top HDFView source code directory run:
     //ant run
 From the top HDFView source code directory run:
     //ant binaryPackage
 
 Go into the build/dist directory that gets created to find the HDFView distribution.
-For example, HDFView-99.99.99-Linux_64.tar.gz.
+For example, HDFView-3.4.99-Linux_64.tar.gz.
 
 Uncompress the HDFView binary and bring up HDFView by executing the bin/HDFView command.
diff --git a/docs/RELEASE.txt b/docs/RELEASE.txt
index 7b01e9cf..6349995d 100644
--- a/docs/RELEASE.txt
+++ b/docs/RELEASE.txt
@@ -4,7 +4,7 @@ The current HDFView products can be downloaded at:
 
 HDF Libraries
 =============================================================================
-This release was built and tested with HDF 4.3.x and HDF5 1.15.x.
+This release was built and tested with HDF 4.3.x and HDF5 1.16.x.
 
 
 JDK
diff --git a/docs/UsersGuide/RELEASE.txt b/docs/UsersGuide/RELEASE.txt
index 7b01e9cf..6349995d 100644
--- a/docs/UsersGuide/RELEASE.txt
+++ b/docs/UsersGuide/RELEASE.txt
@@ -4,7 +4,7 @@ The current HDFView products can be downloaded at:
 
 HDF Libraries
 =============================================================================
-This release was built and tested with HDF 4.3.x and HDF5 1.15.x.
+This release was built and tested with HDF 4.3.x and HDF5 1.16.x.
 
 
 JDK
diff --git a/docs/WorkingDirectory.txt b/docs/WorkingDirectory.txt
index 5e3b7219..c183fe40 100644
--- a/docs/WorkingDirectory.txt
+++ b/docs/WorkingDirectory.txt
@@ -35,7 +35,7 @@ The following locations are involved in where the working directory for a file s
    A. The initial working directory as described above.
    B. The directory used to load a file specified as an argument on the command line invocation of HDFView.
          java ... hdf.view.HDFView startDir=some_location "ARGS"
-   C. A location saved in the HDFView property file, .hdfview99.99.99 in the user home directory.
+   C. A location saved in the HDFView property file, .hdfview3.4.99 in the user home directory.
          work.dir=dir_path
       This is property is only set by using the "User Options" dialog.
    D. The last directory used to load a file using the HDFView file search dialog.
diff --git a/package_files/macosx/Info.plist b/package_files/macosx/Info.plist
index b1ff242b..0aa194f0 100644
--- a/package_files/macosx/Info.plist
+++ b/package_files/macosx/Info.plist
@@ -21,14 +21,14 @@
   <key>CFBundlePackageType</key>
   <string>APPL</string>
   <key>CFBundleShortVersionString</key>
-  <string>99.99.99</string>
+  <string>3.4.99</string>
   <key>CFBundleSignature</key>
   <string>????</string>
   <key>LSApplicationCategoryType</key>
   <string>public.app-category.utilities</string>
   <key>CFBundleVersion</key>
-  <string>99.99.99</string>
+  <string>3.4.99</string>
   <key>NSHumanReadableCopyright</key>
   <string>Copyright 2006 by The HDF Group</string>
   <key>NSHighResolutionCapable</key>
diff --git a/src/org.hdfgroup.hdfview/hdf/view/ViewProperties.java b/src/org.hdfgroup.hdfview/hdf/view/ViewProperties.java
index f1c01df7..3867953b 100644
--- a/src/org.hdfgroup.hdfview/hdf/view/ViewProperties.java
+++ b/src/org.hdfgroup.hdfview/hdf/view/ViewProperties.java
@@ -1611,36 +1611,6 @@ public void load() throws IOException
                 }
             }
         }
-
-        // load srb account
-        // log.trace("load user properties: srb account");
-        // propVal = null;
-        // String srbaccount[] = new String[7];
-        // (int i = 0; i < MAX_RECENT_FILES; i++) {
-        //     (null == (srbaccount[0] = getString("srbaccount" + i + ".host")))
-        //         continue;
-        //
-        //     (null == (srbaccount[1] = getString("srbaccount" + i + ".port")))
-        //         continue;
-        //
-        //     (null == (srbaccount[2] = getString("srbaccount" + i + ".user")))
-        //         continue;
-        //
-        //     (null == (srbaccount[3] = getString("srbaccount" + i + ".password")))
-        //         continue;
-        //
-        //     (null == (srbaccount[4] = getString("srbaccount" + i + ".home")))
-        //         continue;
-        //
-        //     (null == (srbaccount[5] = getString("srbaccount" + i + ".domain")))
-        //         continue;
-        //
-        //     (null == (srbaccount[6] = getString("srbaccount" + i + ".resource")))
-        //         continue;
-        //
-        //     srbAccountList.add(srbaccount);
-        //     srbaccount = new String[7];
-        // }
     }
 
     /**
@@ -1740,28 +1710,6 @@ else if (isReadSWMR)
                 setValue("palette.file" + i, theFile);
         }
 
-        // save srb account
-        // log.trace("save user properties: srb account");
-        // String srbaccount[] = null;
-        // size    = srbAccountList.size();
-        // minSize = Math.min(size, MAX_RECENT_FILES);
-        // (int i = 0; i < minSize; i++) {
-        //     srbaccount = srbAccountList.get(i);
-        //     ((srbaccount[0] != null) && (srbaccount[1] != null) && (srbaccount[2] !=
-        //     null)
-        //         && (srbaccount[3] != null) && (srbaccount[4] != null) && (srbaccount[5] !=
-        //         null)
-        //         && (srbaccount[6] != null)) {
-        //         setValue("srbaccount" + i + ".host", srbaccount[0]);
-        //         setValue("srbaccount" + i + ".port", srbaccount[1]);
-        //         setValue("srbaccount" + i + ".user", srbaccount[2]);
-        //         setValue("srbaccount" + i + ".password", srbaccount[3]);
-        //         setValue("srbaccount" + i + ".home", srbaccount[4]);
-        //         setValue("srbaccount" + i + ".domain", srbaccount[5]);
-        //         setValue("srbaccount" + i + ".resource", srbaccount[6]);
-        //     }
-        // }
-
         // save default modules
         log.trace("save user properties: default modules");
         String moduleName = moduleListTreeView.get(0);
diff --git a/src/org.hdfgroup.object/hdf/object/h5/H5CompoundAttr.java b/src/org.hdfgroup.object/hdf/object/h5/H5CompoundAttr.java
index 324dd8bd..c3625e11 100644
--- a/src/org.hdfgroup.object/hdf/object/h5/H5CompoundAttr.java
+++ b/src/org.hdfgroup.object/hdf/object/h5/H5CompoundAttr.java
@@ -1943,61 +1943,5 @@ public Object AttributeCommonIO(long attr_id, H5File.IO_TYPE ioType, Object objB
     public Object AttributeSelection() throws Exception
     {
         return originalBuf;
-        // H5Datatype dsDatatype = (H5Datatype) getDatatype();
-        // Object theData = H5Datatype.allocateArray(dsDatatype, (int)nPoints);
-        // if (dsDatatype.isText() && convertByteToString && (theData instanceof byte[])) {
-        //     log.trace("AttributeSelection(): isText: converting byte array to string array");
-        //     theData = byteToString((byte[]) theData, (int) dsDatatype.getDatatypeSize());
-        // }
-        // else if (dsDatatype.isFloat() && dsDatatype.getDatatypeSize() == 16) {
-        //     log.trace("AttributeSelection(): isFloat: converting byte array to BigDecimal array");
-        //     theData = dsDatatype.byteToBigDecimal(0, (int)nPoints, (byte[]) theData);
-        // }
-        // else if (dsDatatype.isArray() && dsDatatype.getDatatypeBase().isFloat() &&
-        //          dsDatatype.getDatatypeBase().getDatatypeSize() == 16) {
-        //     log.trace("AttributeSelection(): isArray and isFloat: converting byte array to BigDecimal
-        //     array"); long[] arrayDims = dsDatatype.getArrayDims(); int asize = (int)nPoints; for (int j
-        //     = 0; j < arrayDims.length; j++) {
-        //         asize *= arrayDims[j];
-        //     }
-        //     theData = ((H5Datatype)dsDatatype.getDatatypeBase()).byteToBigDecimal(0, asize, (byte[])
-        //     theData);
-        // }
-        // Object theOrig = originalBuf;
-
-        /*
-         * Copy the selection from originalBuf to theData Only three dims are involved and selected data is 2
-         * dimensions getHeight() is the row dimension getWidth() is the col dimension getDepth() is the frame
-         * dimension
-         */
-        // long[] start = getStartDims();
-        // long curFrame = start[selectedIndex[2]];
-        // for (int col = 0; col < (int)getWidth(); col++) {
-        //     for (int row = 0; row < (int)getHeight(); row++) {
-
-        //         int k = (int)startDims[selectedIndex[2]] * (int)getDepth();
-        //         int index = row * (int)getWidth() + col;
-        //         log.trace("compoundAttributeSelection(): point{} row:col:k={}:{}:{}", curFrame, row,
-        //         col, k); int fromIndex = ((int)curFrame * (int)getWidth() * (int)getHeight() +
-        //                                   col * (int)getHeight() +
-        //                                   row);// * (int) dsDatatype.getDatatypeSize();
-        //         int toIndex = (col * (int)getHeight() +
-        //                        row);// * (int) dsDatatype.getDatatypeSize();
-        //         int objSize = 1;
-        //         if (dsDatatype.isArray()) {
-        //             long[] arrayDims = dsDatatype.getArrayDims();
-        //             objSize          = (int)arrayDims.length;
-        //         }
-        //         for (int i = 0; i < ((ArrayList)theOrig).size(); i++) {
-        //             Object theOrigobj = ((ArrayList)theOrig).get(i);
-        //             Object theDataobj = ((ArrayList)theData).get(i);
-        //             log.trace("compoundAttributeSelection(): theOrig={} theData={}", theOrigobj,
-        //             theDataobj); System.arraycopy(theOrig, fromIndex, theData, toIndex, objSize);
-        //         }
-        //     }
-        // }
-
-        // log.trace("compoundAttributeSelection(): theData={}", theData);
-        // return theData;
     }
 }
diff --git a/test/org.hdfgroup.object.test/object/H5FileTest.java b/test/org.hdfgroup.object.test/object/H5FileTest.java
index 2257a2f6..7f2c93ae 100644
--- a/test/org.hdfgroup.object.test/object/H5FileTest.java
+++ b/test/org.hdfgroup.object.test/object/H5FileTest.java
@@ -1586,19 +1586,6 @@ public void testCreateLink()
             assertFalse("H5Lget_info(): ", link_info == null);
             log.trace("H5Lget_info(): NAME_SOFT_LINK_DANGLE {}", link_info.type);
             assertTrue("H5Lget_info(): link type", link_info.type == HDF5Constants.H5L_TYPE_SOFT);
-
-            // String[] link_value = { null, null };
-            // String targetObjName = null;
-            // try {
-            //     H5.H5Lget_value(gid, "NAME_SOFT_LINK_DANGLE", link_value,
-            //     HDF5Constants.H5P_DEFAULT);
-            // }
-            // catch (Exception ex) {
-            //     log.debug("H5Lget_value(): H5Lget_value {} failure: ",
-            //     obj.getFullName(), ex);
-            // }
-            // log.trace("H5Lget_value(): NAME_SOFT_LINK_DANGLE {} {}", link_value[0],
-            // link_value[1]); assertEquals("DS1", link_value[0]);
         }
         finally {
             try {
@@ -1777,16 +1764,6 @@ public void testCreateLinkExternal()
             assertFalse("H5Lget_info ", link_info == null);
             assertTrue("H5Lget_info link type", link_info.type == HDF5Constants.H5L_TYPE_EXTERNAL);
 
-            // String[] link_value = { null, null };
-            // String targetObjName = null;
-            // try {
-            //     H5.H5Lget_value(gid, "GROUP_HARD_LINK_DANGLE", link_value, HDF5Constants.H5P_DEFAULT);
-            // }
-            // catch (Exception ex) {
-            //     log.debug("getLinkTargetName(): H5Lget_value {} failure: ", obj.getFullName(), ex);
-            // }
-            // assertEquals("DGroup", link_value[1] + FileFormat.FILE_OBJ_SEP + link_value[0]);
-
             try {
                 fgrp1.close(gid);
             }
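
Note: every binary-fetch step this patch touches follows the same pattern, swapping the rolling 'tags/snapshot' release on HDFGroup/hdf5 for the pinned 'tags/snapshot-1.16' tag. A minimal sketch of the resulting step is below, assuming the name_hdf5 input and the per-platform asset suffixes already used by these workflows; the Linux suffix shown is just one of the three variants (win-vs2022_cl.zip, ubuntu-2204_gcc.tar.gz, macos14_clang.tar.gz) that appear in the diff.

      # Fetch a pinned HDF5 1.16 snapshot binary instead of the rolling snapshot.
      # 'name_hdf5' and the asset suffix mirror the names used in ant.yml and
      # ant-app.yml; swap the suffix for the target platform.
      - name: Get hdf5 release
        uses: dsaltares/fetch-gh-release-asset@master
        with:
          repo: 'HDFGroup/hdf5'
          version: 'tags/snapshot-1.16'
          file: '${{ inputs.name_hdf5 }}-ubuntu-2204_gcc.tar.gz'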