diff --git a/.github/workflows/aocc-auto.yml b/.github/workflows/aocc-auto.yml index 8e87fb74604..11b1bc0ebd4 100644 --- a/.github/workflows/aocc-auto.yml +++ b/.github/workflows/aocc-auto.yml @@ -74,6 +74,7 @@ jobs: --enable-build-mode=${{ inputs.build_mode }} \ --enable-shared \ --enable-parallel \ + --enable-subfiling-vfd \ LDFLAGS="-L/home/runner/work/hdf5/hdf5/aocc-compiler-4.2.0/lib \ -L/home/runner/work/hdf5/hdf5/openmpi-4.1.6-install/lib" diff --git a/.github/workflows/aocc-cmake.yml b/.github/workflows/aocc-cmake.yml index 1e49511dfcc..60257a72a52 100644 --- a/.github/workflows/aocc-cmake.yml +++ b/.github/workflows/aocc-cmake.yml @@ -71,6 +71,7 @@ jobs: -DCMAKE_BUILD_TYPE=${{ inputs.build_mode }} \ -DHDF5_ENABLE_SZIP_SUPPORT:BOOL=OFF \ -DHDF5_ENABLE_PARALLEL:BOOL=ON \ + -DHDF5_ENABLE_SUBFILING_VFD:BOOL=ON \ -DHDF5_BUILD_CPP_LIB:BOOL=OFF \ -DLIBAEC_USE_LOCALCONTENT=OFF \ -DZLIB_USE_LOCALCONTENT=OFF \ diff --git a/.github/workflows/clang-format-check.yml b/.github/workflows/clang-format-check.yml index c4d68a8f004..8fd5b5fb8f7 100644 --- a/.github/workflows/clang-format-check.yml +++ b/.github/workflows/clang-format-check.yml @@ -1,8 +1,10 @@ name: clang-format Check on: pull_request: + permissions: contents: read + jobs: formatting-check: name: Formatting Check @@ -10,8 +12,9 @@ jobs: if: "!contains(github.event.head_commit.message, 'skip-ci')" steps: - uses: actions/checkout@v4.1.7 + - name: Run clang-format style check for C and Java code - uses: DoozyX/clang-format-lint-action@v0.17 + uses: DoozyX/clang-format-lint-action@v0.18.2 with: source: '.' 
extensions: 'c,h,cpp,hpp,java' diff --git a/.github/workflows/clang-format-fix.yml b/.github/workflows/clang-format-fix.yml index 2ce9f6e9dad..a99d32d38ad 100644 --- a/.github/workflows/clang-format-fix.yml +++ b/.github/workflows/clang-format-fix.yml @@ -11,8 +11,10 @@ name: clang-format Commit Changes on: workflow_dispatch: push: + permissions: contents: read + jobs: formatting-check: name: Commit Format Changes @@ -21,9 +23,10 @@ jobs: permissions: contents: write # In order to allow EndBug/add-and-commit to commit changes steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@v4.1.7 + - name: Fix C and Java formatting issues detected by clang-format - uses: DoozyX/clang-format-lint-action@d3c7f85989e3b6416265a0d12f8b4a8aa8b0c4ff # v0.13 + uses: DoozyX/clang-format-lint-action@v0.18.2 with: source: '.' extensions: 'c,h,cpp,hpp,java' @@ -31,6 +34,7 @@ jobs: inplace: True style: file exclude: './config ./hl/src/H5LTanalyze.c ./hl/src/H5LTparse.c ./hl/src/H5LTparse.h ./src/H5Epubgen.h ./src/H5Einit.h ./src/H5Eterm.h ./src/H5Edefin.h ./src/H5version.h ./src/H5overflow.h' + - uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4 with: author_name: github-actions diff --git a/.github/workflows/cmake-ctest.yml b/.github/workflows/cmake-ctest.yml index 11d9ab79bed..a2f2df55780 100644 --- a/.github/workflows/cmake-ctest.yml +++ b/.github/workflows/cmake-ctest.yml @@ -23,6 +23,12 @@ on: required: true default: snapshots secrets: + APPLE_CERTS_BASE64: + required: true + APPLE_CERTS_BASE64_PASSWD: + required: true + KEYCHAIN_PASSWD: + required: true AZURE_TENANT_ID: required: true AZURE_CLIENT_ID: @@ -53,13 +59,15 @@ jobs: run: | if [[ '${{ env.signing_secret }}' == '' ]] then - SIGN_VAL=$(echo "false") + SIGN_VAL=$(echo 'notexists') else - SIGN_VAL=$(echo "true") + SIGN_VAL=$(echo 'exists') fi echo "BINSIGN=$SIGN_VAL" >> $GITHUB_OUTPUT shell: bash + - run: echo "signing is ${{ 
steps.set-signing-state.outputs.BINSIGN }}." + build_and_test_win: # Windows w/ MSVC + CMake # @@ -114,7 +122,27 @@ jobs: run: 7z x ${{ steps.set-file-base.outputs.FILE_BASE }}.zip shell: bash + - name: Install TrustedSigning (Windows) + run: | + Invoke-WebRequest -Uri https://dist.nuget.org/win-x86-commandline/latest/nuget.exe -OutFile .\nuget.exe + .\nuget.exe install Microsoft.Windows.SDK.BuildTools -Version 10.0.22621.3233 -x + .\nuget.exe install Microsoft.Trusted.Signing.Client -Version 1.0.53 -x + shell: pwsh + if: ${{ needs.check-secret.outputs.sign-state == 'exists' }} + + - name: create-json + id: create-json + uses: jsdaniell/create-json@v1.2.3 + with: + name: "credentials.json" + dir: '${{ steps.set-file-base.outputs.SOURCE_BASE }}' + json: '{"Endpoint": "${{ secrets.AZURE_ENDPOINT }}","CodeSigningAccountName": "${{ secrets.AZURE_CODE_SIGNING_NAME }}","CertificateProfileName": "${{ secrets.AZURE_CERT_PROFILE_NAME }}"}' + if: ${{ needs.check-secret.outputs.sign-state == 'exists' }} + - name: Run ctest (Windows) + env: + BINSIGN: ${{ needs.check-secret.outputs.sign-state }} + SIGNTOOLDIR: ${{ github.workspace }}/Microsoft.Windows.SDK.BuildTools/bin/10.0.22621.0/x64 run: | cd "${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}" cmake --workflow --preset=${{ inputs.preset_name }}-MSVC --fresh @@ -134,7 +162,7 @@ jobs: file-digest: SHA256 timestamp-rfc3161: http://timestamp.acs.microsoft.com timestamp-digest: SHA256 - if: ${{ needs.check-secret.outputs.sign-state == 'true' }} + if: ${{ needs.check-secret.outputs.sign-state == 'exists' }} - name: Publish binary (Windows) id: publish-ctest-binary @@ -323,6 +351,28 @@ jobs: with: version: "1.9.7" + - name: Install the Apple certificate and provisioning profile + shell: bash + env: + BUILD_CERTIFICATE_BASE64: ${{ secrets.APPLE_CERTS_BASE64 }} + P12_PASSWORD: ${{ secrets.APPLE_CERTS_BASE64_PASSWD }} + KEYCHAIN_PASSWD: ${{ secrets.KEYCHAIN_PASSWD }} + run: | + # create variables + 
CERTIFICATE_PATH=$RUNNER_TEMP/build_certificate.p12 + KEYCHAIN_FILE=${{ vars.KEYCHAIN_NAME }}.keychain + # import certificate from secrets + echo $BUILD_CERTIFICATE_BASE64 | base64 --decode > $CERTIFICATE_PATH + security -v create-keychain -p $KEYCHAIN_PASSWD $KEYCHAIN_FILE + security -v list-keychain -d user -s $KEYCHAIN_FILE + security -v list-keychains + security -v set-keychain-settings -lut 21600 $KEYCHAIN_FILE + security -v unlock-keychain -p $KEYCHAIN_PASSWD $KEYCHAIN_FILE + # import certificate to keychain + security -v import $CERTIFICATE_PATH -P $P12_PASSWORD -A -t cert -f pkcs12 -k $KEYCHAIN_FILE + security -v set-key-partition-list -S apple-tool:,codesign:,apple: -k $KEYCHAIN_PASSWD $KEYCHAIN_FILE + if: ${{ needs.check-secret.outputs.sign-state == 'exists' }} + - name: Set up JDK 19 uses: actions/setup-java@v4 with: @@ -367,11 +417,100 @@ jobs: - name: Run ctest (MacOS_latest) id: run-ctest + env: + BINSIGN: ${{ needs.check-secret.outputs.sign-state }} + SIGNER: ${{ vars.SIGNER }} run: | cd "${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}" - cmake --workflow --preset=${{ inputs.preset_name }}-MACOS-Clang --fresh + cmake --workflow --preset=${{ inputs.preset_name }}-macos-Clang --fresh + shell: bash + + - name: Sign dmg (MacOS_latest) + id: sign-dmg + env: + KEYCHAIN_PASSWD: ${{ secrets.KEYCHAIN_PASSWD }} + KEYCHAIN_NAME: ${{ vars.KEYCHAIN_NAME }} + SIGNER: ${{ vars.SIGNER }} + NOTARY_USER: ${{ vars.NOTARY_USER }} + NOTARY_KEY: ${{ vars.NOTARY_KEY }} + run: | + /usr/bin/codesign --force --timestamp --options runtime --entitlements ${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}/config/cmake/distribution.entitlements --verbose=4 --strict --sign ${{ env.SIGNER }} --deep ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-macos-Clang/*.dmg + if: ${{ needs.check-secret.outputs.sign-state == 'exists' }} shell: bash + - name: Check dmg timestamp (MacOS_latest) + run: | + /usr/bin/codesign -dvv 
${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-macos-Clang/*.dmg + if: ${{ needs.check-secret.outputs.sign-state == 'exists' }} + shell: bash + + - name: Verify dmg (MacOS_latest) + run: | + /usr/bin/hdiutil verify ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-macos-Clang/*.dmg + if: ${{ needs.check-secret.outputs.sign-state == 'exists' }} + shell: bash + + - name: Notarize dmg (MacOS_latest) + id: notarize-dmg + env: + KEYCHAIN_PASSWD: ${{ secrets.KEYCHAIN_PASSWD }} + KEYCHAIN_NAME: ${{ vars.KEYCHAIN_NAME }} + SIGNER: ${{ vars.SIGNER }} + NOTARY_USER: ${{ vars.NOTARY_USER }} + NOTARY_KEY: ${{ vars.NOTARY_KEY }} + run: | + jsonout=$(/usr/bin/xcrun notarytool submit --wait --output-format json --apple-id ${{ env.NOTARY_USER }} --password ${{ env.NOTARY_KEY }} --team-id ${{ env.SIGNER }} ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-macos-Clang/*.dmg) + echo "JSONOUT=$jsonout" >> $GITHUB_OUTPUT + if: ${{ needs.check-secret.outputs.sign-state == 'exists' }} + shell: bash + + - name: Get ID token (MacOS_latest) + id: get-id-token + run: | + echo "notary result is ${{ fromJson(steps.notarize-dmg.outputs.JSONOUT) }}" + token=${{ fromJson(steps.notarize-dmg.outputs.JSONOUT).id }} + echo "ID_TOKEN=$token" >> "$GITHUB_OUTPUT" + if: ${{ needs.check-secret.outputs.sign-state == 'exists' }} + shell: bash + + - name: post notary check (MacOS_latest) + id: post-notary + env: + KEYCHAIN_PASSWD: ${{ secrets.KEYCHAIN_PASSWD }} + KEYCHAIN_NAME: ${{ vars.KEYCHAIN_NAME }} + SIGNER: ${{ vars.SIGNER }} + NOTARY_USER: ${{ vars.NOTARY_USER }} + NOTARY_KEY: ${{ vars.NOTARY_KEY }} + run: | + { + echo 'NOTARYOUT<> $GITHUB_OUTPUT + if: ${{ needs.check-secret.outputs.sign-state == 'exists' }} + shell: bash + + - name: Get notary info (MacOS_latest) + id: get-notary-info + run: | + echo "notary info is ${{ steps.post-notary.outputs.NOTARYOUT }}." 
+ if: ${{ needs.check-secret.outputs.sign-state == 'exists' }} + shell: bash + + - name: Staple dmg (MacOS_latest) + id: staple-dmg + env: + KEYCHAIN_PASSWD: ${{ secrets.KEYCHAIN_PASSWD }} + KEYCHAIN_NAME: ${{ vars.KEYCHAIN_NAME }} + SIGNER: ${{ vars.SIGNER }} + NOTARY_USER: ${{ vars.NOTARY_USER }} + NOTARY_KEY: ${{ vars.NOTARY_KEY }} + run: | + /usr/bin/xcrun stapler staple ${{ runner.workspace }}/hdf5/build/${{ inputs.preset_name }}-macos-Clang/*.dmg + if: ${{ needs.check-secret.outputs.sign-state == 'exists' }} + shell: bash + continue-on-error: true + - name: Publish binary (MacOS_latest) id: publish-ctest-binary run: | @@ -395,7 +534,7 @@ jobs: cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-Clang/README.md ${{ runner.workspace }}/builddmg/hdf5 cp ${{ runner.workspace }}/hdf5/build114/${{ inputs.preset_name }}-Clang/*.dmg ${{ runner.workspace }}/builddmg/hdf5 cd "${{ runner.workspace }}/builddmg" - tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-macos14_clang.arm64.dmg.tar.gz hdf5 + tar -zcvf ${{ steps.set-file-base.outputs.FILE_BASE }}-macos14_clang.dmg.tar.gz hdf5 shell: bash - name: List files in the space (MacOS_latest) @@ -415,7 +554,7 @@ jobs: uses: actions/upload-artifact@v4 with: name: tgz-macos14_clang-dmg-binary - path: ${{ runner.workspace }}/builddmg/${{ steps.set-file-base.outputs.FILE_BASE }}-macos14_clang.arm64.dmg.tar.gz + path: ${{ runner.workspace }}/builddmg/${{ steps.set-file-base.outputs.FILE_BASE }}-macos14_clang.dmg.tar.gz if-no-files-found: error # 'warn' or 'ignore' are also available, defaults to `warn` build_and_test_S3_linux: @@ -544,11 +683,30 @@ jobs: run: 7z x ${{ steps.set-file-base.outputs.FILE_BASE }}.zip shell: bash + - name: Install TrustedSigning (Windows) + run: | + Invoke-WebRequest -Uri https://dist.nuget.org/win-x86-commandline/latest/nuget.exe -OutFile .\nuget.exe + .\nuget.exe install Microsoft.Windows.SDK.BuildTools -Version 10.0.22621.3233 -x + .\nuget.exe install 
Microsoft.Trusted.Signing.Client -Version 1.0.53 -x + shell: pwsh + if: ${{ needs.check-secret.outputs.sign-state == 'exists' }} + + - name: create-json + id: create-json + uses: jsdaniell/create-json@v1.2.3 + with: + name: "credentials.json" + dir: '${{ steps.set-file-base.outputs.SOURCE_BASE }}' + json: '{"Endpoint": "${{ secrets.AZURE_ENDPOINT }}","CodeSigningAccountName": "${{ secrets.AZURE_CODE_SIGNING_NAME }}","CertificateProfileName": "${{ secrets.AZURE_CERT_PROFILE_NAME }}"}' + if: ${{ needs.check-secret.outputs.sign-state == 'exists' }} + - name: Run ctest (Windows_intel) with oneapi env: FC: ${{ steps.setup-fortran.outputs.fc }} CC: ${{ steps.setup-fortran.outputs.cc }} CXX: ${{ steps.setup-fortran.outputs.cxx }} + BINSIGN: ${{ needs.check-secret.outputs.sign-state }} + SIGNTOOLDIR: ${{ github.workspace }}/Microsoft.Windows.SDK.BuildTools/bin/10.0.22621.0/x64 run: | cd "${{ runner.workspace }}/hdf5/${{ steps.set-file-base.outputs.SOURCE_BASE }}" cmake --workflow --preset=${{ inputs.preset_name }}-win-Intel --fresh @@ -568,7 +726,7 @@ jobs: file-digest: SHA256 timestamp-rfc3161: http://timestamp.acs.microsoft.com timestamp-digest: SHA256 - if: ${{ needs.check-secret.outputs.sign-state == 'true' }} + if: ${{ needs.check-secret.outputs.sign-state == 'exists' }} - name: Publish binary (Windows_intel) id: publish-ctest-binary diff --git a/.github/workflows/daily-build.yml b/.github/workflows/daily-build.yml index 358be50a538..2464fd77a84 100644 --- a/.github/workflows/daily-build.yml +++ b/.github/workflows/daily-build.yml @@ -45,6 +45,9 @@ jobs: #use_tag: snapshot-1.14 use_environ: snapshots secrets: + APPLE_CERTS_BASE64: ${{ secrets.APPLE_CERTS_BASE64 }} + APPLE_CERTS_BASE64_PASSWD: ${{ secrets.APPLE_CERTS_BASE64_PASSWD }} + KEYCHAIN_PASSWD: ${{ secrets.KEYCHAIN_PASSWD }} AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }} AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }} diff --git 
a/.github/workflows/julia-auto.yml b/.github/workflows/julia-auto.yml index 9c13990edf6..8f523ec183d 100644 --- a/.github/workflows/julia-auto.yml +++ b/.github/workflows/julia-auto.yml @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Get Sources - uses: actions/checkout@v4.1.1 + uses: actions/checkout@v4.1.7 - name: Install Dependencies shell: bash @@ -60,7 +60,7 @@ jobs: arch: 'x64' - name: Get julia hdf5 source - uses: actions/checkout@v4.1.1 + uses: actions/checkout@v4.1.7 with: repository: JuliaIO/HDF5.jl path: . diff --git a/.github/workflows/julia-cmake.yml b/.github/workflows/julia-cmake.yml index 1972deefd8f..6560135f0b1 100644 --- a/.github/workflows/julia-cmake.yml +++ b/.github/workflows/julia-cmake.yml @@ -17,7 +17,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Get Sources - uses: actions/checkout@v4.1.1 + uses: actions/checkout@v4.1.7 - name: Install Dependencies shell: bash @@ -63,7 +63,7 @@ jobs: arch: 'x64' - name: Get julia hdf5 source - uses: actions/checkout@v4.1.1 + uses: actions/checkout@v4.1.7 with: repository: JuliaIO/HDF5.jl path: . 
diff --git a/.github/workflows/main-auto-par.yml b/.github/workflows/main-auto-par.yml index bd0b3b2a83b..506e213eaa0 100644 --- a/.github/workflows/main-auto-par.yml +++ b/.github/workflows/main-auto-par.yml @@ -63,6 +63,7 @@ jobs: --with-default-api-version=v114 \ --enable-shared \ --enable-parallel \ + --enable-subfiling-vfd \ --disable-cxx \ --enable-fortran \ --disable-java \ diff --git a/.github/workflows/main-auto-spc.yml b/.github/workflows/main-auto-spc.yml index 9087c4365b7..bb416478949 100644 --- a/.github/workflows/main-auto-spc.yml +++ b/.github/workflows/main-auto-spc.yml @@ -428,7 +428,7 @@ jobs: --enable-cxx \ --disable-fortran \ --enable-java \ - --disable-mirror-vfd \ + --enable-mirror-vfd \ --enable-direct-vfd \ --disable-ros3-vfd \ --with-szlib=yes @@ -491,7 +491,7 @@ jobs: --enable-cxx \ --disable-fortran \ --enable-java \ - --disable-mirror-vfd \ + --enable-mirror-vfd \ --enable-direct-vfd \ --disable-ros3-vfd \ --with-szlib=yes diff --git a/.github/workflows/main-cmake-par.yml b/.github/workflows/main-cmake-par.yml index 2e724bf031e..e6428917b7e 100644 --- a/.github/workflows/main-cmake-par.yml +++ b/.github/workflows/main-cmake-par.yml @@ -47,6 +47,7 @@ jobs: -DBUILD_SHARED_LIBS=ON \ -DHDF5_ENABLE_ALL_WARNINGS=ON \ -DHDF5_ENABLE_PARALLEL:BOOL=ON \ + -DHDF5_ENABLE_SUBFILING_VFD:BOOL=ON \ -DHDF5_BUILD_CPP_LIB:BOOL=OFF \ -DHDF5_BUILD_FORTRAN=ON \ -DHDF5_BUILD_JAVA=OFF \ diff --git a/.github/workflows/publish-branch.yml b/.github/workflows/publish-branch.yml index 1e5b99bd0b0..6c52c75308a 100644 --- a/.github/workflows/publish-branch.yml +++ b/.github/workflows/publish-branch.yml @@ -22,7 +22,7 @@ jobs: steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - name: Get Sources - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@9a9194f87191a7e9055e3e9b95b8cfb13023bb08 # v4.1.7 with: fetch-depth: 0 ref: '${{ github.head_ref || github.ref_name }}' diff --git 
a/.github/workflows/publish-release.yml b/.github/workflows/publish-release.yml index c153d217d87..dd7f4bfbc86 100644 --- a/.github/workflows/publish-release.yml +++ b/.github/workflows/publish-release.yml @@ -26,7 +26,7 @@ jobs: steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - name: Get Sources - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@9a9194f87191a7e9055e3e9b95b8cfb13023bb08 # v4.1.7 with: fetch-depth: 0 ref: '${{ github.head_ref || github.ref_name }}' diff --git a/.github/workflows/release-files.yml b/.github/workflows/release-files.yml index 2233aff2bb5..baf439c5919 100644 --- a/.github/workflows/release-files.yml +++ b/.github/workflows/release-files.yml @@ -40,7 +40,7 @@ jobs: steps: # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it - name: Get Sources - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@9a9194f87191a7e9055e3e9b95b8cfb13023bb08 # v4.1.7 with: fetch-depth: 0 ref: '${{ github.head_ref || github.ref_name }}' @@ -186,7 +186,7 @@ jobs: sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}.zip >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt - sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.arm64.dmg.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt + sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.dmg.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt sha256sum ${{ 
steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt sha256sum ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm.tar.gz >> ${{ steps.get-file-base.outputs.FILE_BASE }}.sha256sums.txt @@ -232,7 +232,7 @@ jobs: ${{ steps.get-file-base.outputs.FILE_BASE }}.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}.zip ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz - ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.arm64.dmg.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.dmg.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm.tar.gz @@ -261,7 +261,7 @@ jobs: hdf5.tar.gz hdf5.zip ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.tar.gz - ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.arm64.dmg.tar.gz + ${{ steps.get-file-base.outputs.FILE_BASE }}-macos14_clang.dmg.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.deb.tar.gz ${{ steps.get-file-base.outputs.FILE_BASE }}-ubuntu-2204_gcc.rpm.tar.gz diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e7a7825789b..b383cedf535 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -41,6 +41,9 @@ jobs: snap_name: hdf5-${{ needs.call-workflow-tarball.outputs.source_base }} use_environ: release secrets: + APPLE_CERTS_BASE64: ${{ secrets.APPLE_CERTS_BASE64 }} + APPLE_CERTS_BASE64_PASSWD: ${{ secrets.APPLE_CERTS_BASE64_PASSWD }} + KEYCHAIN_PASSWD: ${{ secrets.KEYCHAIN_PASSWD }} AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }} AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }} AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }} diff --git a/.github/workflows/scorecard.yml 
b/.github/workflows/scorecard.yml index e67627fd885..d59f8c9bc6b 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -32,7 +32,7 @@ jobs: steps: - name: "Checkout code" - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@9a9194f87191a7e9055e3e9b95b8cfb13023bb08 # v4.1.7 with: persist-credentials: false @@ -67,6 +67,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. - name: "Upload to code-scanning" - uses: github/codeql-action/upload-sarif@afb54ba388a7dca6ecae48f608c4ff05ff4cc77a # v3.25.15 + uses: github/codeql-action/upload-sarif@4dd16135b69a43b6c8efb853346f8437d92d3c93 # v3.26.6 with: sarif_file: results.sarif diff --git a/CITATION.cff b/CITATION.cff index 4e611a57468..f7eaf133318 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -9,4 +9,4 @@ authors: website: 'https://www.hdfgroup.org' repository-code: 'https://github.com/HDFGroup/hdf5' url: 'https://www.hdfgroup.org/HDF5/' -repository-artifact: 'https://www.hdfgroup.org/downloads/hdf5/' +repository-artifact: 'https://support.hdfgroup.org/downloads/HDF5' diff --git a/CMakeInstallation.cmake b/CMakeInstallation.cmake index 313dbb2e8e3..a2564e68ad5 100644 --- a/CMakeInstallation.cmake +++ b/CMakeInstallation.cmake @@ -268,6 +268,11 @@ if (NOT HDF5_EXTERNALLY_CONFIGURED AND NOT HDF5_NO_PACKAGES) endif () set (CPACK_PACKAGE_ICON "${HDF_RESOURCES_DIR}/hdf.bmp") + set (CPACK_ORIG_SOURCE_DIR ${CMAKE_SOURCE_DIR}) + if ("$ENV{BINSIGN}" STREQUAL "exists") + set (CPACK_PRE_BUILD_SCRIPTS ${CMAKE_SOURCE_DIR}/config/cmake/SignPackageFiles.cmake) + endif () + set (CPACK_GENERATOR "TGZ") if (WIN32) set (CPACK_GENERATOR "ZIP") diff --git a/CMakePresets.json b/CMakePresets.json index 5d3cd7e1313..e9c02352149 100644 --- a/CMakePresets.json +++ b/CMakePresets.json @@ -155,6 +155,26 @@ "ci-StdShar" ] }, + { + "name": "ci-StdShar-macos-Clang", + "description": "Clang Standard Config for macos (Release)", + "inherits": [ + 
"ci-macos-Release-Clang", + "ci-CPP", + "ci-Java", + "ci-StdShar" + ] + }, + { + "name": "ci-StdShar-macos-GNUC", + "description": "GNUC Standard Config for macos (Release)", + "inherits": [ + "ci-macos-Release-GNUC", + "ci-CPP", + "ci-Java", + "ci-StdShar" + ] + }, { "name": "ci-StdShar-GNUC", "description": "GNUC Standard Config for x64 (Release)", @@ -203,6 +223,23 @@ "ci-x64-Release-Clang" ] }, + { + "name": "ci-StdShar-macos-Clang", + "description": "Clang Standard Build for macos (Release)", + "configurePreset": "ci-StdShar-macos-Clang", + "inherits": [ + "ci-macos-Release-Clang" + ] + }, + { + "name": "ci-StdShar-macos-GNUC", + "description": "GNUC Standard Build for macos (Release)", + "configurePreset": "ci-StdShar-macos-GNUC", + "verbose": true, + "inherits": [ + "ci-macos-Release-GNUC" + ] + }, { "name": "ci-StdShar-GNUC", "description": "GNUC Standard Build for x64 (Release)", @@ -252,22 +289,24 @@ ] }, { - "name": "ci-StdShar-MACOS-Clang", - "configurePreset": "ci-StdShar-Clang", + "name": "ci-StdShar-macos-Clang", + "configurePreset": "ci-StdShar-macos-Clang", "inherits": [ - "ci-x64-Release-Clang" + "ci-macos-Release-Clang" ], "execution": { "noTestsAction": "error", "timeout": 180, "jobs": 2 - }, - "condition": { - "type": "equals", - "lhs": "${hostSystemName}", - "rhs": "Darwin" } }, + { + "name": "ci-StdShar-macos-GNUC", + "configurePreset": "ci-StdShar-macos-GNUC", + "inherits": [ + "ci-macos-Release-GNUC" + ] + }, { "name": "ci-StdShar-GNUC", "configurePreset": "ci-StdShar-GNUC", @@ -318,6 +357,16 @@ "configurePreset": "ci-StdShar-Clang", "inherits": "ci-x64-Release-Clang" }, + { + "name": "ci-StdShar-macos-Clang", + "configurePreset": "ci-StdShar-macos-Clang", + "inherits": "ci-macos-Release-Clang" + }, + { + "name": "ci-StdShar-macos-GNUC", + "configurePreset": "ci-StdShar-macos-GNUC", + "inherits": "ci-macos-Release-GNUC" + }, { "name": "ci-StdShar-GNUC", "configurePreset": "ci-StdShar-GNUC", @@ -354,12 +403,12 @@ ] }, { - "name": 
"ci-StdShar-MACOS-Clang", + "name": "ci-StdShar-macos-Clang", "steps": [ - {"type": "configure", "name": "ci-StdShar-Clang"}, - {"type": "build", "name": "ci-StdShar-Clang"}, - {"type": "test", "name": "ci-StdShar-MACOS-Clang"}, - {"type": "package", "name": "ci-StdShar-Clang"} + {"type": "configure", "name": "ci-StdShar-macos-Clang"}, + {"type": "build", "name": "ci-StdShar-macos-Clang"}, + {"type": "test", "name": "ci-StdShar-macos-Clang"}, + {"type": "package", "name": "ci-StdShar-macos-Clang"} ] }, { @@ -371,6 +420,15 @@ {"type": "package", "name": "ci-StdShar-GNUC"} ] }, + { + "name": "ci-StdShar-macos-GNUC", + "steps": [ + {"type": "configure", "name": "ci-StdShar-macos-GNUC"}, + {"type": "build", "name": "ci-StdShar-macos-GNUC"}, + {"type": "test", "name": "ci-StdShar-macos-GNUC"}, + {"type": "package", "name": "ci-StdShar-macos-GNUC"} + ] + }, { "name": "ci-StdShar-GNUC-S3", "steps": [ diff --git a/HDF5Examples/C/CMakeLists.txt b/HDF5Examples/C/CMakeLists.txt index 97a9ea1f672..b394212150e 100644 --- a/HDF5Examples/C/CMakeLists.txt +++ b/HDF5Examples/C/CMakeLists.txt @@ -11,7 +11,7 @@ add_subdirectory (${PROJECT_SOURCE_DIR}/H5T) if (${H5_LIBVER_DIR} GREATER 16) # add_subdirectory (${PROJECT_SOURCE_DIR}/Perf) - if (USE_SHARED_LIBS AND HDF_BUILD_FILTERS AND HDF5_ENABLE_PLUGIN_SUPPORT) + if (USE_SHARED_LIBS AND H5EX_BUILD_FILTERS AND HDF5_ENABLE_PLUGIN_SUPPORT) add_subdirectory (${PROJECT_SOURCE_DIR}/H5FLT) endif () endif () diff --git a/HDF5Examples/CMakeLists.txt b/HDF5Examples/CMakeLists.txt index 7f823abb1e7..5ce806f6253 100644 --- a/HDF5Examples/CMakeLists.txt +++ b/HDF5Examples/CMakeLists.txt @@ -39,8 +39,8 @@ message (STATUS "HDF5 H5_LIBVER_DIR: ${H5_LIBVER_DIR} HDF5_VERSION_MAJOR: ${HDF5 #----------------------------------------------------------------------------- # Option to build JAVA examples #----------------------------------------------------------------------------- -option (HDF_BUILD_JAVA "Build JAVA support" OFF) -if (HDF_BUILD_JAVA) +option 
(H5EX_BUILD_JAVA "Build JAVA support" OFF) +if (H5EX_BUILD_JAVA) find_package (Java) include (${H5EX_RESOURCES_DIR}/UseJava.cmake) @@ -73,8 +73,8 @@ endif () # Option to Enable MPI Parallel #----------------------------------------------------------------------------- set (CMAKE_MODULE_PATH ${H5EX_RESOURCES_DIR} ${CMAKE_MODULE_PATH}) -option (HDF_ENABLE_PARALLEL "Enable parallel build (requires MPI)" OFF) -if (HDF_ENABLE_PARALLEL) +option (H5EX_ENABLE_PARALLEL "Enable parallel build (requires MPI)" OFF) +if (H5EX_ENABLE_PARALLEL) find_package(MPI REQUIRED) if (MPI_C_FOUND) set (H5_HAVE_PARALLEL 1) @@ -99,29 +99,6 @@ if (H5_HAVE_PARALLEL) INCLUDE_DIRECTORIES (${MPI_C_INCLUDE_DIRS}) endif () -#----------------------------------------------------------------------------- -# Option to use threadsafe -# Note: Currently CMake only allows configuring of threadsafe on WINDOWS. -#----------------------------------------------------------------------------- -option (HDF_ENABLE_THREADSAFE "Enable Threadsafety" OFF) -# Note that HDF_ENABLE_THREADSAFE is the CMake option for determining -# whether to enable thread-safety in the examples. HDF5_ENABLE_THREADSAFE -# is the CMake option determining whether HDF5 was configured with -# thread-safety enabled. 
-if (HDF_ENABLE_THREADSAFE AND HDF5_ENABLE_THREADSAFE) - if (WIN32) - set (H5_HAVE_WIN_THREADS 1) - set (H5_HAVE_THREADSAFE 1) - endif () - set(CMAKE_THREAD_PREFER_PTHREAD TRUE) - set(THREADS_PREFER_PTHREAD_FLAG TRUE) - find_package(Threads REQUIRED) - if (NOT Threads_FOUND) - message (STATUS " **** thread-safe package not found - threads still might work **** ") - endif () - set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} Threads::Threads) -endif () - set_directory_properties(PROPERTIES INCLUDE_DIRECTORIES "${H5EX_HDF5_INCLUDE_DIRS}" ) @@ -150,8 +127,8 @@ if (${H5_LIBVER_DIR} GREATER 16) set (H5_FC_FUNC "H5_FC_FUNC(name,NAME) name ## _") set (H5_FC_FUNC_ "H5_FC_FUNC_(name,NAME) name ## _") if (EXISTS "${H5EXAMPLES_SOURCE_DIR}/FORTRAN" AND IS_DIRECTORY "${H5EXAMPLES_SOURCE_DIR}/FORTRAN") - option (HDF_BUILD_FORTRAN "Build examples FORTRAN support" OFF) - if (HDF_BUILD_FORTRAN AND HDF5_BUILD_FORTRAN) + option (H5EX_BUILD_FORTRAN "Build examples FORTRAN support" OFF) + if (H5EX_BUILD_FORTRAN AND HDF5_BUILD_FORTRAN) set (H5EX_LINK_Fortran_LIBS ${H5EX_HDF5_LINK_LIBS}) # Parallel IO usage requires MPI to be Linked and Included @@ -165,10 +142,10 @@ if (${H5_LIBVER_DIR} GREATER 16) configure_file (${H5EX_F90_SRC_DIR}/H5D/h5_version.h.in ${PROJECT_BINARY_DIR}/FORTRAN/H5D/h5_version.h @ONLY) configure_file (${H5EX_F90_SRC_DIR}/H5D/h5_version.h.in ${PROJECT_BINARY_DIR}/FORTRAN/H5G/h5_version.h @ONLY) else () - set (HDF_BUILD_FORTRAN OFF CACHE BOOL "Build examples FORTRAN support" FORCE) + set (H5EX_BUILD_FORTRAN OFF CACHE BOOL "Build examples FORTRAN support" FORCE) endif () else () - set (HDF_BUILD_FORTRAN OFF CACHE BOOL "Build examples FORTRAN support" FORCE) + set (H5EX_BUILD_FORTRAN OFF CACHE BOOL "Build examples FORTRAN support" FORCE) endif () if (${H5_LIBVER_DIR} GREATER 18) @@ -176,29 +153,29 @@ if (${H5_LIBVER_DIR} GREATER 16) # Option to build JAVA examples #----------------------------------------------------------------------------- if (EXISTS 
"${H5EXAMPLES_SOURCE_DIR}/JAVA" AND IS_DIRECTORY "${H5EXAMPLES_SOURCE_DIR}/JAVA") - option (HDF_BUILD_JAVA "Build examples JAVA support" OFF) + option (H5EX_BUILD_JAVA "Build examples JAVA support" OFF) else () - set (HDF_BUILD_JAVA OFF CACHE BOOL "Build examples JAVA support" FORCE) + set (H5EX_BUILD_JAVA OFF CACHE BOOL "Build examples JAVA support" FORCE) endif () else () - set (HDF_BUILD_JAVA OFF CACHE BOOL "Build examples JAVA support" FORCE) + set (H5EX_BUILD_JAVA OFF CACHE BOOL "Build examples JAVA support" FORCE) endif () #----------------------------------------------------------------------------- # Build the CPP Examples #----------------------------------------------------------------------------- if (EXISTS "${H5EXAMPLES_SOURCE_DIR}/CXX" AND IS_DIRECTORY "${H5EXAMPLES_SOURCE_DIR}/CXX") - option (HDF_BUILD_CPP_LIB "Build examples C++ support" OFF) + option (H5EX_BUILD_CPP_LIB "Build examples C++ support" OFF) else () - set (HDF_BUILD_CPP_LIB OFF CACHE BOOL "Build examples C++ support" FORCE) + set (H5EX_BUILD_CPP_LIB OFF CACHE BOOL "Build examples C++ support" FORCE) endif () #----------------------------------------------------------------------------- # Option to build filter examples #----------------------------------------------------------------------------- if (EXISTS "${H5EXAMPLES_SOURCE_DIR}/C/H5FLT" AND IS_DIRECTORY "${H5EXAMPLES_SOURCE_DIR}/C/H5FLT") - option (HDF_BUILD_FILTERS "Build examples PLUGIN filter support" OFF) - if (HDF_BUILD_FILTERS AND HDF5_ENABLE_PLUGIN_SUPPORT) + option (H5EX_BUILD_FILTERS "Build examples PLUGIN filter support" OFF) + if (H5EX_BUILD_FILTERS AND HDF5_ENABLE_PLUGIN_SUPPORT) if(DEFINED ENV{HDF5_PLUGIN_PATH}) message (STATUS "ENV PATH=$ENV{HDF5_PLUGIN_PATH}") set (H5EX_HDF5_PLUGIN_PATH $ENV{HDF5_PLUGIN_PATH}) @@ -210,32 +187,32 @@ if (${H5_LIBVER_DIR} GREATER 16) endif () message (STATUS "H5EX_HDF5_PLUGIN_PATH=${H5EX_HDF5_PLUGIN_PATH}") else () - set (HDF_BUILD_FILTERS OFF CACHE BOOL "Build examples PLUGIN filter 
support" FORCE) + set (H5EX_BUILD_FILTERS OFF CACHE BOOL "Build examples PLUGIN filter support" FORCE) endif () else () - set (HDF_BUILD_FILTERS OFF CACHE BOOL "Build examples PLUGIN filter support" FORCE) + set (H5EX_BUILD_FILTERS OFF CACHE BOOL "Build examples PLUGIN filter support" FORCE) endif () else () - set (HDF_BUILD_FORTRAN OFF} CACHE BOOL "Build examples FORTRAN support" FORCE) - set (HDF_BUILD_JAVA OFF CACHE BOOL "Build examples JAVA support" FORCE) - set (HDF_BUILD_CPP_LIB OFF CACHE BOOL "Build examples C++ support" FORCE) - set (HDF_BUILD_FILTERS OFF CACHE BOOL "Build examples PLUGIN filter support" FORCE) + set (H5EX_BUILD_FORTRAN OFF} CACHE BOOL "Build examples FORTRAN support" FORCE) + set (H5EX_BUILD_JAVA OFF CACHE BOOL "Build examples JAVA support" FORCE) + set (H5EX_BUILD_CPP_LIB OFF CACHE BOOL "Build examples C++ support" FORCE) + set (H5EX_BUILD_FILTERS OFF CACHE BOOL "Build examples PLUGIN filter support" FORCE) endif () #----------------------------------------------------------------------------- # Build examples #----------------------------------------------------------------------------- add_subdirectory (C) -if (HDF_BUILD_FORTRAN AND HDF5_BUILD_FORTRAN) +if (H5EX_BUILD_FORTRAN AND HDF5_BUILD_FORTRAN) add_subdirectory (FORTRAN) endif () -if (HDF_BUILD_JAVA AND HDF5_BUILD_JAVA) +if (H5EX_BUILD_JAVA AND HDF5_BUILD_JAVA) add_subdirectory (JAVA) endif () -if (HDF_BUILD_CPP_LIB AND HDF5_BUILD_CPP_LIB) +if (H5EX_BUILD_CPP_LIB AND HDF5_BUILD_CPP_LIB) add_subdirectory (CXX) endif () -if (HDF_BUILD_PYTHON) +if (H5EX_BUILD_PYTHON) add_subdirectory (PYTHON) endif () diff --git a/HDF5Examples/CMakePresets.json b/HDF5Examples/CMakePresets.json index 2dbf304111d..0f2c1b5e013 100644 --- a/HDF5Examples/CMakePresets.json +++ b/HDF5Examples/CMakePresets.json @@ -18,21 +18,21 @@ "name": "ci-StdJava", "hidden": true, "cacheVariables": { - "HDF_BUILD_JAVA": "ON" + "H5EX_BUILD_JAVA": "ON" } }, { "name": "ci-StdFortran", "hidden": true, "cacheVariables": { - 
"HDF_BUILD_FORTRAN": "ON" + "H5EX_BUILD_FORTRAN": "ON" } }, { "name": "ci-StdPlugins", "hidden": true, "cacheVariables": { - "HDF_BUILD_FILTERS": "ON" + "H5EX_BUILD_FILTERS": "ON" } }, { diff --git a/HDF5Examples/FORTRAN/H5D/CMakeLists.txt b/HDF5Examples/FORTRAN/H5D/CMakeLists.txt index b5d202eca17..236d90108e4 100644 --- a/HDF5Examples/FORTRAN/H5D/CMakeLists.txt +++ b/HDF5Examples/FORTRAN/H5D/CMakeLists.txt @@ -10,7 +10,7 @@ project (HDF5Examples_FORTRAN_H5D Fortran) # Setup include Directories #----------------------------------------------------------------------------- set_directory_properties(PROPERTIES INCLUDE_DIRECTORIES - "${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT};${HDF5_F90_BINARY_DIR};${PROJECT_BINARY_DIR};${CMAKE_LIBRARY_OUTPUT_DIRECTORY}" + "${CMAKE_Fortran_MODULE_DIRECTORY}${H5EX_MOD_EXT};${HDF5_F90_BINARY_DIR};${PROJECT_BINARY_DIR};${CMAKE_LIBRARY_OUTPUT_DIRECTORY}" ) #----------------------------------------------------------------------------- diff --git a/HDF5Examples/FORTRAN/H5G/CMakeLists.txt b/HDF5Examples/FORTRAN/H5G/CMakeLists.txt index 78e2c4fc399..5a3207f10d7 100644 --- a/HDF5Examples/FORTRAN/H5G/CMakeLists.txt +++ b/HDF5Examples/FORTRAN/H5G/CMakeLists.txt @@ -10,7 +10,7 @@ project (HDF5Examples_FORTRAN_H5G Fortran) # Setup include Directories #----------------------------------------------------------------------------- set_directory_properties(PROPERTIES INCLUDE_DIRECTORIES - "${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT};${HDF5_F90_BINARY_DIR};${PROJECT_BINARY_DIR};${CMAKE_LIBRARY_OUTPUT_DIRECTORY}" + "${CMAKE_Fortran_MODULE_DIRECTORY}${H5EX_MOD_EXT};${HDF5_F90_BINARY_DIR};${PROJECT_BINARY_DIR};${CMAKE_LIBRARY_OUTPUT_DIRECTORY}" ) #----------------------------------------------------------------------------- @@ -46,7 +46,7 @@ foreach (example_name ${common_examples}) endif () endforeach () -#if (HDF_ENABLE_F2003) +#if (H5EX_ENABLE_F2003) # foreach (example_name ${f03examples}) # add_executable 
(${EXAMPLE_VARNAME}_f90_${example_name} ${PROJECT_SOURCE_DIR}/${example_name}.c) # target_compile_options(${EXAMPLE_VARNAME}_f90_${example_name} @@ -147,7 +147,7 @@ if (HDF5_BUILD_TOOLS) ) endif () -#if (HDF_ENABLE_F2003) +#if (H5EX_ENABLE_F2003) # foreach (example_name ${f03examples}) # add_custom_command ( # TARGET ${EXAMPLE_VARNAME}_f90_${example_name} @@ -430,7 +430,7 @@ if (H5EX_BUILD_TESTING) #ADD_H5_CMP_TEST (h5ex_g_traverse) #endif() else () - if (HDF_ENABLE_F2003) + if (H5EX_ENABLE_F2003) ADD_H5_CMP_TEST (h5ex_g_intermediate) ADD_H5_CMP_TEST (h5ex_g_iterate) # ADD_H5_CMP_TEST (h5ex_g_traverse) diff --git a/HDF5Examples/FORTRAN/H5G/Fortran_sourcefiles.cmake b/HDF5Examples/FORTRAN/H5G/Fortran_sourcefiles.cmake index e2e8e9d42b8..f5586e61f8c 100644 --- a/HDF5Examples/FORTRAN/H5G/Fortran_sourcefiles.cmake +++ b/HDF5Examples/FORTRAN/H5G/Fortran_sourcefiles.cmake @@ -23,7 +23,7 @@ if (HDF5_VERSION_STRING VERSION_GREATER_EQUAL "1.10.0") ) endif() else () - if (HDF_ENABLE_F2003) + if (H5EX_ENABLE_F2003) set (common_examples ${common_examples} h5ex_g_intermediate diff --git a/HDF5Examples/FORTRAN/H5PAR/CMakeLists.txt b/HDF5Examples/FORTRAN/H5PAR/CMakeLists.txt index 293af1e3377..1e920341598 100644 --- a/HDF5Examples/FORTRAN/H5PAR/CMakeLists.txt +++ b/HDF5Examples/FORTRAN/H5PAR/CMakeLists.txt @@ -10,7 +10,7 @@ project (HDF5Examples_FORTRAN_H5PAR Fortran) # Setup include Directories #----------------------------------------------------------------------------- set_directory_properties(PROPERTIES INCLUDE_DIRECTORIES - "${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT};${HDF5_F90_BINARY_DIR};${PROJECT_BINARY_DIR};${CMAKE_LIBRARY_OUTPUT_DIRECTORY}" + "${CMAKE_Fortran_MODULE_DIRECTORY}${H5EX_MOD_EXT};${HDF5_F90_BINARY_DIR};${PROJECT_BINARY_DIR};${CMAKE_LIBRARY_OUTPUT_DIRECTORY}" ) #----------------------------------------------------------------------------- diff --git a/HDF5Examples/FORTRAN/H5T/CMakeLists.txt b/HDF5Examples/FORTRAN/H5T/CMakeLists.txt index 
ff7f6459ff0..53822052072 100644 --- a/HDF5Examples/FORTRAN/H5T/CMakeLists.txt +++ b/HDF5Examples/FORTRAN/H5T/CMakeLists.txt @@ -5,7 +5,7 @@ project (HDF5Examples_FORTRAN_H5T Fortran) # Setup include Directories #----------------------------------------------------------------------------- set_directory_properties(PROPERTIES INCLUDE_DIRECTORIES - "${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT};${HDF5_F90_BINARY_DIR};${PROJECT_BINARY_DIR};${CMAKE_LIBRARY_OUTPUT_DIRECTORY}" + "${CMAKE_Fortran_MODULE_DIRECTORY}${H5EX_MOD_EXT};${HDF5_F90_BINARY_DIR};${PROJECT_BINARY_DIR};${CMAKE_LIBRARY_OUTPUT_DIRECTORY}" ) #----------------------------------------------------------------------------- @@ -13,7 +13,7 @@ set_directory_properties(PROPERTIES INCLUDE_DIRECTORIES #----------------------------------------------------------------------------- include (Fortran_sourcefiles.cmake) -#if (HDF_ENABLE_F2003) +#if (H5EX_ENABLE_F2003) foreach (example_name ${f03_examples}) add_executable (${EXAMPLE_VARNAME}_f90_${example_name} ${PROJECT_SOURCE_DIR}/${example_name}.F90) target_compile_options(${EXAMPLE_VARNAME}_f90_${example_name} @@ -124,7 +124,7 @@ if (HDF5_BUILD_TOOLS) ) endforeach () - #if (HDF_ENABLE_F2003) + #if (H5EX_ENABLE_F2003) foreach (example_name ${f03_examples}) if (NOT ${example_name} STREQUAL "h5ex_t_convert_F03") if (${example_name} STREQUAL "h5ex_t_vlen_F03" OR ${example_name} STREQUAL "h5ex_t_vlenatt_F03") @@ -402,7 +402,7 @@ if (H5EX_BUILD_TESTING) endif () endmacro () - #if (HDF_ENABLE_F2003) + #if (H5EX_ENABLE_F2003) foreach (example_name ${f03_examples} ${common_examples}) TEST_EXAMPLE (${example_name}) endforeach () diff --git a/HDF5Examples/FORTRAN/TUTR/CMakeLists.txt b/HDF5Examples/FORTRAN/TUTR/CMakeLists.txt index 9d998f258d0..cd705450f8e 100644 --- a/HDF5Examples/FORTRAN/TUTR/CMakeLists.txt +++ b/HDF5Examples/FORTRAN/TUTR/CMakeLists.txt @@ -5,7 +5,7 @@ project (HDF5Examples_FORTRAN_TUTR Fortran) # Setup include Directories 
#----------------------------------------------------------------------------- set_directory_properties(PROPERTIES INCLUDE_DIRECTORIES - "${CMAKE_Fortran_MODULE_DIRECTORY}${HDF_MOD_EXT};${HDF5_F90_BINARY_DIR};${PROJECT_BINARY_DIR};${CMAKE_LIBRARY_OUTPUT_DIRECTORY}" + "${CMAKE_Fortran_MODULE_DIRECTORY}${H5EX_MOD_EXT};${HDF5_F90_BINARY_DIR};${PROJECT_BINARY_DIR};${CMAKE_LIBRARY_OUTPUT_DIRECTORY}" ) #----------------------------------------------------------------------------- @@ -13,7 +13,7 @@ set_directory_properties(PROPERTIES INCLUDE_DIRECTORIES #----------------------------------------------------------------------------- include (Fortran_sourcefiles.cmake) -#if (HDF_ENABLE_F2003) +#if (H5EX_ENABLE_F2003) foreach (example_name ${f03_examples}) add_executable (${EXAMPLE_VARNAME}_f90_tutr_${example_name} ${PROJECT_SOURCE_DIR}/${example_name}.f90) target_compile_options(${EXAMPLE_VARNAME}_f90_tutr_${example_name} @@ -110,7 +110,7 @@ if (H5EX_BUILD_TESTING) set (last_test "${EXAMPLE_VARNAME}_f90_tutr_${testname}") endmacro () - #if (HDF_ENABLE_F2003) + #if (H5EX_ENABLE_F2003) foreach (example_name ${f03_examples} ${common_examples}) ADD_H5_TEST (${example_name}) endforeach () diff --git a/HDF5Examples/README.md b/HDF5Examples/README.md index 2f0090ba02c..c1a27d5fb12 100644 --- a/HDF5Examples/README.md +++ b/HDF5Examples/README.md @@ -48,17 +48,17 @@ HDF5 SNAPSHOTS, PREVIOUS RELEASES AND SOURCE CODE -------------------------------------------- Full Documentation and Programming Resources for this HDF5 can be found at - https://portal.hdfgroup.org/documentation/index.html + https://support.hdfgroup.org/documentation/hdf5/index.html Periodically development code snapshots are provided at the following URL: - - https://gamma.hdfgroup.org/ftp/pub/outgoing/hdf5/snapshots/ + + https://github.com/HDFGroup/hdf5/releases Source packages for current and previous releases are located at: - - https://portal.hdfgroup.org/downloads/ + + 
https://support.hdfgroup.org/releases/hdf5/downloads/ Development code is available at our Github location: - + https://github.com/HDFGroup/hdf5.git diff --git a/HDF5Examples/Using_CMake.txt b/HDF5Examples/Using_CMake.txt index 778fa7534b5..baef3565194 100644 --- a/HDF5Examples/Using_CMake.txt +++ b/HDF5Examples/Using_CMake.txt @@ -99,8 +99,8 @@ These steps are described in more detail below. is: * H5EX_BUILD_TESTING:BOOL=ON * BUILD_SHARED_LIBS:BOOL=[ON | OFF] - * HDF_BUILD_FORTRAN:BOOL=[ON | OFF] - * HDF_BUILD_JAVA:BOOL=[ON | OFF] + * H5EX_BUILD_FORTRAN:BOOL=[ON | OFF] + * H5EX_BUILD_JAVA:BOOL=[ON | OFF] if the hdf5 library was built with a namespace (i.e. "hdf5::") add: -D HDF5_NAMESPACE:STRING=hdf5:: diff --git a/HDF5Examples/config/cmake/HDFExampleMacros.cmake b/HDF5Examples/config/cmake/HDFExampleMacros.cmake index 9888c06d36a..bbb042177a3 100644 --- a/HDF5Examples/config/cmake/HDFExampleMacros.cmake +++ b/HDF5Examples/config/cmake/HDFExampleMacros.cmake @@ -34,7 +34,7 @@ macro (BASIC_SETTINGS varname) set (CMAKE_C_STANDARD 99) set (CMAKE_C_STANDARD_REQUIRED TRUE) - if (HDF_BUILD_CPP_LIB) + if (H5EX_BUILD_CPP_LIB) ENABLE_LANGUAGE (CXX) set (CMAKE_CXX_STANDARD 98) @@ -66,12 +66,12 @@ macro (BASIC_SETTINGS varname) #----------------------------------------------------------------------------- # Option to allow the user to disable compiler warnings #----------------------------------------------------------------------------- - option (HDF_DISABLE_COMPILER_WARNINGS "Disable compiler warnings" OFF) - if (HDF_DISABLE_COMPILER_WARNINGS) + option (H5EX_DISABLE_COMPILER_WARNINGS "Disable compiler warnings" OFF) + if (H5EX_DISABLE_COMPILER_WARNINGS) # MSVC uses /w to suppress warnings. It also complains if another # warning level is given, so remove it. 
if (MSVC) - set (HDF_WARNINGS_BLOCKED 1) + set (H5EX_WARNINGS_BLOCKED 1) string (REGEX REPLACE "(^| )([/-])W[0-9]( |$)" " " CMAKE_C_FLAGS "${CMAKE_C_FLAGS}") set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /w") if (CMAKE_CXX_COMPILER_LOADED AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") @@ -84,12 +84,12 @@ macro (BASIC_SETTINGS varname) endif () # Borland uses -w- to suppress warnings. if (BORLAND) - set (HDF_WARNINGS_BLOCKED 1) + set (H5EX_WARNINGS_BLOCKED 1) set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -w-") endif () # Most compilers use -w to suppress warnings. - if (NOT HDF_WARNINGS_BLOCKED) + if (NOT H5EX_WARNINGS_BLOCKED) set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -w") if (CMAKE_CXX_COMPILER_LOADED AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -w") @@ -106,8 +106,8 @@ macro (BASIC_SETTINGS varname) endmacro () macro (PYTHON_SUPPORT) - option (HDF_BUILD_PYTHON "Test Python3 support" OFF) - if (HDF_BUILD_PYTHON) + option (H5EX_BUILD_PYTHON "Test Python3 support" OFF) + if (H5EX_BUILD_PYTHON) find_package (Python3 COMPONENTS Interpreter Development NumPy) if (Python3_FOUND AND Python3_NumPy_FOUND) include (ExternalProject) @@ -122,7 +122,7 @@ macro (PYTHON_SUPPORT) INSTALL_COMMAND python3 -m pip --no-cache-dir install -v . 
) else () - set (HDF_BUILD_PYTHON OFF CACHE BOOL "Test Python3 support" FORCE) + set (H5EX_BUILD_PYTHON OFF CACHE BOOL "Test Python3 support" FORCE) message (STATUS "Python3:${Python3_FOUND} or numpy:${Python3_NumPy_FOUND} not found - disable test of Python examples") endif () endif () @@ -137,16 +137,16 @@ macro (HDF5_SUPPORT) set (FIND_HDF_COMPONENTS C shared) else () set (FIND_HDF_COMPONENTS C static) - set (HDF_BUILD_JAVA OFF CACHE BOOL "Build Java support" FORCE) + set (HDEXF_BUILD_JAVA OFF CACHE BOOL "Build Java support" FORCE) message (STATUS "Using static HDF5 - disable build of Java examples") endif () - if (HDF_BUILD_FORTRAN) + if (H5EX_BUILD_FORTRAN) set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} Fortran) endif () - if (HDF_BUILD_CPP_LIB) + if (H5EX_BUILD_CPP_LIB) set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} CXX) endif () - if (HDF_BUILD_JAVA) + if (H5EX_BUILD_JAVA) set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} Java) set (HDF5_Java_FOUND 1) #default setting for 1.10.1 and earlier endif () @@ -161,13 +161,13 @@ macro (HDF5_SUPPORT) if (NOT HDF5_static_C_FOUND AND NOT HDF5_shared_C_FOUND) #find library from non-dual-binary package set (FIND_HDF_COMPONENTS C) - if (HDF_BUILD_FORTRAN) + if (H5EX_BUILD_FORTRAN) set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} Fortran) endif () - if (HDF_BUILD_JAVA) + if (H5EX_BUILD_JAVA) set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} Java) endif () - if (HDF_BUILD_CPP_LIB) + if (H5EX_BUILD_CPP_LIB) set (FIND_HDF_COMPONENTS ${FIND_HDF_COMPONENTS} CXX) endif () message (STATUS "HDF5 find comps: ${FIND_HDF_COMPONENTS}") @@ -185,7 +185,7 @@ macro (HDF5_SUPPORT) else () set_property (TARGET ${HDF5_NAMESPACE}h5dump PROPERTY IMPORTED_LOCATION "${HDF5_TOOLS_DIR}/h5dump") endif () - if (HDF_BUILD_JAVA) + if (H5EX_BUILD_JAVA) set (CMAKE_JAVA_INCLUDE_PATH "${CMAKE_JAVA_INCLUDE_PATH};${HDF5_JAVA_INCLUDE_DIRS}") message (STATUS "HDF5 jars:${HDF5_JAVA_INCLUDE_DIRS}") endif () @@ -218,21 +218,21 @@ macro (HDF5_SUPPORT) endif() if (NOT 
HDF5_static_Fortran_FOUND AND NOT HDF5_shared_Fortran_FOUND) - set (HDF_BUILD_FORTRAN OFF CACHE BOOL "Build FORTRAN support" FORCE) + set (H5EX_BUILD_FORTRAN OFF CACHE BOOL "Build FORTRAN support" FORCE) message (STATUS "HDF5 Fortran libs not found - disable build of Fortran examples") else () - if (HDF_BUILD_FORTRAN AND ${HDF5_BUILD_FORTRAN}) + if (H5EX_BUILD_FORTRAN AND ${HDF5_BUILD_FORTRAN}) if (BUILD_SHARED_LIBS AND HDF5_shared_Fortran_FOUND) set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_FORTRAN_SHARED_LIBRARY}) elseif (HDF5_static_Fortran_FOUND) set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_FORTRAN_STATIC_LIBRARY}) else () - set (HDF_BUILD_FORTRAN OFF CACHE BOOL "Build FORTRAN support" FORCE) + set (H5EX_BUILD_FORTRAN OFF CACHE BOOL "Build FORTRAN support" FORCE) message (STATUS "HDF5 Fortran libs not found - disable build of Fortran examples") endif () endif () endif () - if (HDF_BUILD_JAVA AND HDF5_Java_FOUND) + if (H5EX_BUILD_JAVA AND HDF5_Java_FOUND) if (${HDF5_BUILD_JAVA}) set (CMAKE_JAVA_INCLUDE_PATH "${CMAKE_JAVA_INCLUDE_PATH};${HDF5_JAVA_INCLUDE_DIRS}") get_target_property (libsoname ${HDF5_JAVA_LIBRARY} IMPORTED_SONAME${UPPER_BUILD_TYPE}) @@ -243,11 +243,11 @@ macro (HDF5_SUPPORT) set (H5EX_JAVA_LIBRARIES ${HDF5_JAVA_LIBRARY}) message (STATUS "HDF5 lib:${H5EX_JAVA_LIBRARY} jars:${HDF5_JAVA_INCLUDE_DIRS}}") else () - set (HDF_BUILD_JAVA OFF CACHE BOOL "Build Java support" FORCE) + set (H5EX_BUILD_JAVA OFF CACHE BOOL "Build Java support" FORCE) message (STATUS "HDF5 Java libs not found - disable build of Java examples") endif () else () - set (HDF_BUILD_JAVA OFF CACHE BOOL "Build Java support" FORCE) + set (H5EX_BUILD_JAVA OFF CACHE BOOL "Build Java support" FORCE) endif () endif () else () @@ -283,7 +283,7 @@ macro (HDF5_SUPPORT) set (H5EX_HDF5_HAVE_HDF5 1) message (STATUS "HDF5-${HDF5_VERSION_STRING} used") endif () - if (HDF_BUILD_FORTRAN) + if (H5EX_BUILD_FORTRAN) list (APPEND H5EX_HDF5_INCLUDE_DIRS ${HDF5_INCLUDE_DIR_FORTRAN}) 
endif () message (STATUS "HDF5 link libs: ${H5EX_HDF5_LINK_LIBS} Includes: ${H5EX_HDF5_INCLUDE_DIRS}") diff --git a/HDF5Examples/config/cmake/cacheinit.cmake b/HDF5Examples/config/cmake/cacheinit.cmake index 3e5a8eb8c5a..0ef3a0e1b9c 100644 --- a/HDF5Examples/config/cmake/cacheinit.cmake +++ b/HDF5Examples/config/cmake/cacheinit.cmake @@ -8,17 +8,15 @@ set (BUILD_SHARED_LIBS ON CACHE BOOL "Build Shared Libraries" FORCE) set (H5EX_BUILD_TESTING ON CACHE BOOL "Build HDF5 Unit Testing" FORCE) -#set (HDF_ENABLE_PARALLEL ON CACHE BOOL "Enable parallel build (requires MPI)" FORCE) +#set (H5EX_ENABLE_PARALLEL ON CACHE BOOL "Enable parallel build (requires MPI)" FORCE) -#set (HDF_BUILD_FORTRAN ON CACHE BOOL "Build FORTRAN support" FORCE) +#set (H5EX_BUILD_FORTRAN ON CACHE BOOL "Build FORTRAN support" FORCE) -#set (HDF_BUILD_FILTERS ON CACHE BOOL "Build filter support" FORCE) +#set (H5EX_BUILD_FILTERS ON CACHE BOOL "Build filter support" FORCE) -#set (HDF_ENABLE_F2003 ON CACHE BOOL "Enable FORTRAN 2003 Standard" FORCE) +#set (H5EX_ENABLE_F2003 ON CACHE BOOL "Enable FORTRAN 2003 Standard" FORCE) -#set (HDF_ENABLE_THREADSAFE ON CACHE BOOL "Enable Threadsafety" FORCE) - -#set (HDF_BUILD_JAVA ON CACHE BOOL "Build JAVA support" FORCE) +#set (H5EX_BUILD_JAVA ON CACHE BOOL "Build JAVA support" FORCE) set (HDF5_PACKAGE_NAME "hdf5" CACHE STRING "Name of HDF5 package" FORCE) diff --git a/README.md b/README.md index af48a7f8656..f0d9db221d4 100644 --- a/README.md +++ b/README.md @@ -77,7 +77,7 @@ Periodically development code snapshots are provided at the following URL: Source packages for current and previous releases are located at: - https://portal.hdfgroup.org/Downloads + https://support.hdfgroup.org/downloads/hdf5 Development code is available at our Github location: diff --git a/config/cmake-presets/hidden-presets.json b/config/cmake-presets/hidden-presets.json index 98a42b7a801..bc8d03a42c6 100644 --- a/config/cmake-presets/hidden-presets.json +++ 
b/config/cmake-presets/hidden-presets.json @@ -10,7 +10,7 @@ "binaryDir": "${sourceParentDir}/build114/${presetName}", "installDir": "${sourceParentDir}/install114/${presetName}" }, - { + { "name": "ci-x64", "architecture": { "value": "x64", @@ -21,7 +21,15 @@ { "name": "ci-x86", "architecture": { - "value": "x86", + "value": "Win32", + "strategy": "external" + }, + "hidden": true + }, + { + "name": "ci-arm64", + "architecture": { + "value": "ARM64", "strategy": "external" }, "hidden": true @@ -48,25 +56,29 @@ "CMAKE_C_COMPILER": "cl", "CMAKE_CXX_COMPILER": "cl" }, - "toolset": { - "value": "host=x64", - "strategy": "external" - }, "condition": { "type": "equals", "lhs": "${hostSystemName}", "rhs": "Windows" } }, + { + "name": "ci-macos", + "hidden": true, + "cacheVariables": { + "CMAKE_OSX_ARCHITECTURES": "arm64;x86_64" + }, + "condition": { + "type": "equals", + "lhs": "${hostSystemName}", + "rhs": "Darwin" + } + }, { "name": "ci-Clang", "hidden": true, "cacheVariables": { "CMAKE_TOOLCHAIN_FILE": "config/toolchain/clang.cmake" - }, - "toolset": { - "value": "host=x64", - "strategy": "external" } }, { @@ -79,29 +91,17 @@ "type": "equals", "lhs": "${hostSystemName}", "rhs": "Linux" - }, - "toolset": { - "value": "host=x64", - "strategy": "external" } }, { "name": "ci-Intel", - "hidden": true, - "toolset": { - "value": "host=x64", - "strategy": "external" - } + "hidden": true }, { "name": "ci-Fortran", "hidden": true, "cacheVariables": { "HDF5_BUILD_FORTRAN": "ON" - }, - "toolset": { - "value": "host=x64", - "strategy": "external" } }, { @@ -129,10 +129,6 @@ "hidden": true, "cacheVariables": { "HDF5_BUILD_JAVA": "ON" - }, - "toolset": { - "value": "host=x64", - "strategy": "external" } }, { @@ -201,6 +197,50 @@ "ci-GNUC" ] }, + { + "name": "ci-macos-Debug-Clang", + "description": "Clang/LLVM for x64 (Debug)", + "hidden": true, + "inherits": [ + "ci-base", + "ci-macos", + "ci-Debug", + "ci-Clang" + ] + }, + { + "name": "ci-macos-Release-Clang", + "description": 
"Clang/LLVM for x64 (Release)", + "hidden": true, + "inherits": [ + "ci-base", + "ci-macos", + "ci-Release", + "ci-Clang" + ] + }, + { + "name": "ci-macos-Debug-GNUC", + "description": "GNUC for x64 (Debug)", + "hidden": true, + "inherits": [ + "ci-base", + "ci-macos", + "ci-Debug", + "ci-GNUC" + ] + }, + { + "name": "ci-macos-Release-GNUC", + "description": "GNUC for x64 (Release)", + "hidden": true, + "inherits": [ + "ci-base", + "ci-macos", + "ci-Release", + "ci-GNUC" + ] + }, { "name": "ci-x64-Debug-Intel", "description": "Intel for x64 (Debug)", @@ -328,6 +368,38 @@ "ci-base" ] }, + { + "name": "ci-macos-Debug-Clang", + "configurePreset": "ci-macos-Debug-Clang", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-macos-Release-Clang", + "configurePreset": "ci-macos-Release-Clang", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-macos-Debug-GNUC", + "configurePreset": "ci-macos-Debug-GNUC", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-macos-Release-GNUC", + "configurePreset": "ci-macos-Release-GNUC", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, { "name": "ci-x64-Debug-Intel", "configurePreset": "ci-x64-Debug-Intel", @@ -453,6 +525,38 @@ "ci-base" ] }, + { + "name": "ci-macos-Debug-Clang", + "configurePreset": "ci-macos-Debug-Clang", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-macos-Release-Clang", + "configurePreset": "ci-macos-Release-Clang", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-macos-Debug-GNUC", + "configurePreset": "ci-macos-Debug-GNUC", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, + { + "name": "ci-macos-Release-GNUC", + "configurePreset": "ci-macos-Release-GNUC", + "hidden": true, + "inherits": [ + "ci-base" + ] + }, { "name": "ci-x64-Debug-Intel", "configurePreset": "ci-x64-Debug-Intel", @@ -536,6 +640,18 @@ "hidden": true, "inherits": "ci-base" }, + { + "name": "ci-macos-Release-Clang", + 
"configurePreset": "ci-macos-Release-Clang", + "hidden": true, + "inherits": "ci-base" + }, + { + "name": "ci-macos-Release-GNUC", + "configurePreset": "ci-macos-Release-GNUC", + "hidden": true, + "inherits": "ci-base" + }, { "name": "ci-x64-Release-Intel", "configurePreset": "ci-x64-Release-Intel", diff --git a/config/cmake/CPack.Info.plist.in b/config/cmake/CPack.Info.plist.in index b936470fc29..a518f559cc9 100644 --- a/config/cmake/CPack.Info.plist.in +++ b/config/cmake/CPack.Info.plist.in @@ -1,5 +1,5 @@ - - + + CFBundleDevelopmentRegion @@ -16,11 +16,16 @@ FMWK CFBundleSignature ???? + + LSApplicationCategoryType + public.app-category.utilities CFBundleVersion @CPACK_PACKAGE_VERSION@ CFBundleShortVersionString @CPACK_SHORT_VERSION_STRING@ + NSHumanReadableCopyright + Copyright 2006 by The HDF Group CSResourcesFileMapped - + true diff --git a/config/cmake/HDF5ExampleCache.cmake b/config/cmake/HDF5ExampleCache.cmake index 7d5b7be0c40..99232cc06ca 100644 --- a/config/cmake/HDF5ExampleCache.cmake +++ b/config/cmake/HDF5ExampleCache.cmake @@ -7,13 +7,13 @@ # set example options to match build options set (H5EX_BUILD_TESTING ${BUILD_TESTING} CACHE BOOL "Enable examples testing" FORCE) set (H5EX_BUILD_EXAMPLES ${HDF5_BUILD_EXAMPLES} CACHE BOOL "Build Examples" FORCE) -set (HDF_BUILD_FORTRAN ${HDF5_BUILD_FORTRAN} CACHE BOOL "Build examples FORTRAN support" FORCE) -set (HDF_BUILD_JAVA ${HDF5_BUILD_JAVA} CACHE BOOL "Build examples JAVA support" FORCE) -set (HDF_BUILD_FILTERS ${HDF5_ENABLE_PLUGIN_SUPPORT} CACHE BOOL "Build examples PLUGIN filter support" FORCE) -set (HDF_BUILD_CPP_LIB ${HDF5_BUILD_CPP_LIB} CACHE BOOL "Build HDF5 C++ Library" FORCE) -set (HDF_BUILD_HL_LIB ${HDF5_BUILD_HL_LIB} CACHE BOOL "Build HIGH Level examples" FORCE) -set (HDF_ENABLE_THREADSAFE ${HDF5_ENABLE_THREADSAFE} CACHE BOOL "Enable examples thread-safety" FORCE) -set (HDF_ENABLE_PARALLEL ${HDF5_ENABLE_PARALLEL} CACHE BOOL "Enable examples parallel build (requires MPI)" FORCE) +set 
(H5EX_BUILD_FORTRAN ${HDF5_BUILD_FORTRAN} CACHE BOOL "Build examples FORTRAN support" FORCE) +set (H5EX_BUILD_JAVA ${HDF5_BUILD_JAVA} CACHE BOOL "Build examples JAVA support" FORCE) +set (H5EX_BUILD_FILTERS ${HDF5_ENABLE_PLUGIN_SUPPORT} CACHE BOOL "Build examples PLUGIN filter support" FORCE) +set (H5EX_BUILD_CPP_LIB ${HDF5_BUILD_CPP_LIB} CACHE BOOL "Build HDF5 C++ Library" FORCE) +set (H5EX_BUILD_HL_LIB ${HDF5_BUILD_HL_LIB} CACHE BOOL "Build HIGH Level examples" FORCE) +set (H5EX_ENABLE_THREADSAFE ${HDF5_ENABLE_THREADSAFE} CACHE BOOL "Enable examples thread-safety" FORCE) +set (H5EX_ENABLE_PARALLEL ${HDF5_ENABLE_PARALLEL} CACHE BOOL "Enable examples parallel build (requires MPI)" FORCE) set (H5EX_USE_GNU_DIRS ${HDF5_USE_GNU_DIRS} CACHE BOOL "ON to use GNU Coding Standard install directory variables, OFF to use historical settings" FORCE) #preset HDF5 cache vars to this projects libraries instead of searching @@ -46,7 +46,7 @@ if (NOT BUILD_SHARED_LIBS AND BUILD_STATIC_LIBS) set (H5EX_HDF5_LINK_LIBS ${HDF5_LIB_TARGET} CACHE STRING "HDF5 target" FORCE) if (HDF5_BUILD_FORTRAN) set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_F90_LIB_TARGET}) - set (HDF_MOD_EXT "/static" CACHE STRING "Use Static Modules for Examples" FORCE) + set (H5EX_MOD_EXT "/static" CACHE STRING "Use Static Modules for Examples" FORCE) endif () if (HDF5_BUILD_CPP_LIB) set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_CPP_LIB_TARGET}) @@ -56,7 +56,7 @@ else () set (H5EX_HDF5_LINK_LIBS ${HDF5_LIBSH_TARGET} CACHE STRING "HDF5 target" FORCE) if (HDF5_BUILD_FORTRAN) set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_F90_LIBSH_TARGET}) - set (HDF_MOD_EXT "/shared" CACHE STRING "Use Shared Modules for Examples" FORCE) + set (H5EX_MOD_EXT "/shared" CACHE STRING "Use Shared Modules for Examples" FORCE) endif () if (HDF5_BUILD_CPP_LIB) set (H5EX_HDF5_LINK_LIBS ${H5EX_HDF5_LINK_LIBS} ${HDF5_CPP_LIBSH_TARGET}) diff --git a/config/cmake/HDFCompilerFlags.cmake 
b/config/cmake/HDFCompilerFlags.cmake index d8a444b84d2..e3364826ba6 100644 --- a/config/cmake/HDFCompilerFlags.cmake +++ b/config/cmake/HDFCompilerFlags.cmake @@ -54,46 +54,8 @@ if (CMAKE_C_COMPILER_ID STREQUAL "NVHPC" ) set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -s") endif () else () - set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Mbounds -g") + set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Mbounds -gopt") endif () - - # With at least NVHPC 23.5 - 23.9, compiling with -O2 or higher and -DNDEBUG - # appears to have issues that manifest in the tests as incorrect metadata - # checksums being read or memory being corrupted. Compiling without -DNDEBUG - # does not appear to have these issues, but is not ideal due to compiling in - # asserts and other library debug code. Compiling with -O1 also does not appear - # to have these issues, so set maximum optimization level to -O1 for now until - # it can be determined whether these issues are compiler-specific or issues - # in the library. - set (cmake_c_flags_minsizerel_edited "${CMAKE_C_FLAGS_MINSIZEREL}") - string (REPLACE "-O2" "" cmake_c_flags_minsizerel_edited "${cmake_c_flags_minsizerel_edited}") - string (REPLACE "-O3" "" cmake_c_flags_minsizerel_edited "${cmake_c_flags_minsizerel_edited}") - string (REPLACE "-O4" "" cmake_c_flags_minsizerel_edited "${cmake_c_flags_minsizerel_edited}") - string (REPLACE "-Ofast" "" cmake_c_flags_minsizerel_edited "${cmake_c_flags_minsizerel_edited}") - string (REPLACE "-fast" "" cmake_c_flags_minsizerel_edited "${cmake_c_flags_minsizerel_edited}") - string (STRIP "${cmake_c_flags_minsizerel_edited}" cmake_c_flags_minsizerel_edited) - string (PREPEND cmake_c_flags_minsizerel_edited "-O1 ") - set (CMAKE_C_FLAGS_MINSIZEREL "${cmake_c_flags_minsizerel_edited}") - - set (cmake_c_flags_release_edited "${CMAKE_C_FLAGS_RELEASE}") - string (REPLACE "-O2" "" cmake_c_flags_release_edited "${cmake_c_flags_release_edited}") - string (REPLACE "-O3" "" cmake_c_flags_release_edited "${cmake_c_flags_release_edited}") - 
string (REPLACE "-O4" "" cmake_c_flags_release_edited "${cmake_c_flags_release_edited}") - string (REPLACE "-Ofast" "" cmake_c_flags_release_edited "${cmake_c_flags_release_edited}") - string (REPLACE "-fast" "" cmake_c_flags_release_edited "${cmake_c_flags_release_edited}") - string (STRIP "${cmake_c_flags_release_edited}" cmake_c_flags_release_edited) - string (PREPEND cmake_c_flags_release_edited "-O1 ") - set (CMAKE_C_FLAGS_RELEASE "${cmake_c_flags_release_edited}") - - set (cmake_c_flags_relwithdebinfo_edited "${CMAKE_C_FLAGS_RELWITHDEBINFO}") - string (REPLACE "-O2" "" cmake_c_flags_relwithdebinfo_edited "${cmake_c_flags_relwithdebinfo_edited}") - string (REPLACE "-O3" "" cmake_c_flags_relwithdebinfo_edited "${cmake_c_flags_relwithdebinfo_edited}") - string (REPLACE "-O4" "" cmake_c_flags_relwithdebinfo_edited "${cmake_c_flags_relwithdebinfo_edited}") - string (REPLACE "-Ofast" "" cmake_c_flags_relwithdebinfo_edited "${cmake_c_flags_relwithdebinfo_edited}") - string (REPLACE "-fast" "" cmake_c_flags_relwithdebinfo_edited "${cmake_c_flags_relwithdebinfo_edited}") - string (STRIP "${cmake_c_flags_relwithdebinfo_edited}" cmake_c_flags_relwithdebinfo_edited) - string (PREPEND cmake_c_flags_relwithdebinfo_edited "-O1 ") - set (CMAKE_C_FLAGS_RELWITHDEBINFO "${cmake_c_flags_relwithdebinfo_edited}") endif () if (CMAKE_C_COMPILER_ID STREQUAL "GNU") diff --git a/config/cmake/README.md.cmake.in b/config/cmake/README.md.cmake.in index 7f6af3646a2..3f541e4e8a3 100644 --- a/config/cmake/README.md.cmake.in +++ b/config/cmake/README.md.cmake.in @@ -75,6 +75,6 @@ For more information see USING_CMake_Examples.txt in the install folder. 
=========================================================================== Documentation for this release can be found at the following URL: - https://portal.hdfgroup.org/documentation/index.html#hdf5 + https://support.hdfgroup.org/hdf5/@HDF5_PACKAGE_NAME@-@HDF5_PACKAGE_VERSION@/documentation/doxygen/index.html Bugs should be reported to help@hdfgroup.org. diff --git a/config/cmake/SignPackageFiles.cmake b/config/cmake/SignPackageFiles.cmake new file mode 100644 index 00000000000..81cc5bfff21 --- /dev/null +++ b/config/cmake/SignPackageFiles.cmake @@ -0,0 +1,43 @@ +# This script signs the targets for the package +message(STATUS "Signing script in ${CPACK_TEMPORARY_INSTALL_DIRECTORY} and ${CPACK_PACKAGE_INSTALL_DIRECTORY}") + +# RPM needs ALL_COMPONENTS_IN_ONE added to path between ${CPACK_TEMPORARY_INSTALL_DIRECTORY} and ${CPACK_PACKAGE_INSTALL_DIRECTORY} +if (CPACK_GENERATOR MATCHES "RPM") + set (CPACK_TARGET_FILE_DIRECTORY "${CPACK_TEMPORARY_INSTALL_DIRECTORY}/ALL_COMPONENTS_IN_ONE/${CPACK_PACKAGE_INSTALL_DIRECTORY}") +elseif (CPACK_GENERATOR MATCHES "WIX" OR CPACK_GENERATOR MATCHES "NSIS") + set (CPACK_TARGET_FILE_DIRECTORY "${CPACK_TEMPORARY_INSTALL_DIRECTORY}/libraries") +elseif (CPACK_GENERATOR MATCHES "ZIP") + set (CPACK_TARGET_FILE_DIRECTORY "${CPACK_TEMPORARY_INSTALL_DIRECTORY}") +elseif (CPACK_GENERATOR MATCHES "DragNDrop") + set (CPACK_TARGET_FILE_DIRECTORY "${CPACK_TEMPORARY_INSTALL_DIRECTORY}/ALL_IN_ONE/${CPACK_PACKAGE_INSTALL_DIRECTORY}") +else () + set (CPACK_TARGET_FILE_DIRECTORY "${CPACK_TEMPORARY_INSTALL_DIRECTORY}/${CPACK_PACKAGE_INSTALL_DIRECTORY}") +endif () +file (GLOB target_list LIST_DIRECTORIES false "${CPACK_TARGET_FILE_DIRECTORY}/lib/*" "${CPACK_TARGET_FILE_DIRECTORY}/bin/*" "${CPACK_TARGET_FILE_DIRECTORY}/lib/plugin/*") +foreach (targetfile IN LISTS target_list) + if (WIN32) + # Sign the targets + execute_process (COMMAND $ENV{SIGNTOOLDIR}/signtool + sign /v /debug /fd SHA256 /tr http://timestamp.acs.microsoft.com /td SHA256 + /dlib 
"Microsoft.Trusted.Signing.Client/bin/x64/Azure.CodeSigning.Dlib.dll" /dmdf ${CPACK_ORIG_SOURCE_DIR}/credentials.json + ${targetfile} + ) + execute_process ( + COMMAND ${CMAKE_COMMAND} -E echo "Signing the target ${targetfile}" + ) + elseif (APPLE) + # Sign the targets + execute_process (COMMAND codesign + --force --timestamp --options runtime --entitlements ${CPACK_ORIG_SOURCE_DIR}/config/cmake/distribution.entitlements + --verbose=4 --strict --sign "$ENV{SIGNER}" + ${targetfile} + ) + execute_process ( + COMMAND ${CMAKE_COMMAND} -E echo "Signing the target ${targetfile}" + ) + else () + execute_process ( + COMMAND ${CMAKE_COMMAND} -E echo "Signing the target ${targetfile}" + ) + endif () +endforeach () diff --git a/config/cmake/distribution.entitlements b/config/cmake/distribution.entitlements new file mode 100644 index 00000000000..0e0df6c7627 --- /dev/null +++ b/config/cmake/distribution.entitlements @@ -0,0 +1,16 @@ + + + + + com.apple.security.cs.allow-jit + + com.apple.security.cs.allow-unsigned-executable-memory + + com.apple.security.cs.disable-executable-page-protection + + com.apple.security.cs.disable-library-validation + + com.apple.security.cs.allow-dyld-environment-variables + + + diff --git a/config/cmake/examples/CTestScript.cmake b/config/cmake/examples/CTestScript.cmake index 657806ce3c2..b1bfa8a9fc1 100644 --- a/config/cmake/examples/CTestScript.cmake +++ b/config/cmake/examples/CTestScript.cmake @@ -137,7 +137,7 @@ set (CTEST_CONFIGURE_COMMAND #----------------------------------------------------------------------------- ## -- set output to english -set ($ENV{LC_MESSAGES} "en_EN") +set (ENV{LC_MESSAGES} "en_EN") #----------------------------------------------------------------------------- configure_file(${CTEST_SOURCE_DIRECTORY}/config/cmake/CTestCustom.cmake ${CTEST_BINARY_DIRECTORY}/CTestCustom.cmake) diff --git a/config/cmake/examples/HDF5_Examples_options.cmake b/config/cmake/examples/HDF5_Examples_options.cmake index 
684ec5bf641..2fe145c4704 100644 --- a/config/cmake/examples/HDF5_Examples_options.cmake +++ b/config/cmake/examples/HDF5_Examples_options.cmake @@ -14,14 +14,13 @@ #### format: set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DXXX:YY=ZZZZ") ### #### DEFAULT: ### #### BUILD_SHARED_LIBS:BOOL=OFF ### -#### HDF_BUILD_C:BOOL=ON ### -#### HDF_BUILD_CXX:BOOL=OFF ### -#### HDF_BUILD_FORTRAN:BOOL=OFF ### -#### HDF_BUILD_JAVA:BOOL=OFF ### -#### HDF_BUILD_FILTERS:BOOL=OFF ### -#### BUILD_TESTING:BOOL=OFF ### -#### HDF_ENABLE_PARALLEL:BOOL=OFF ### -#### HDF_ENABLE_THREADSAFE:BOOL=OFF ### +#### H5EX_BUILD_C:BOOL=ON ### +#### H5EX_BUILD_CXX:BOOL=OFF ### +#### H5EX_BUILD_FORTRAN:BOOL=OFF ### +#### H5EX_BUILD_JAVA:BOOL=OFF ### +#### H5EX_BUILD_FILTERS:BOOL=OFF ### +#### H5EX_BUILD_TESTING:BOOL=OFF ### +#### H5EX_ENABLE_PARALLEL:BOOL=OFF ### ############################################################################################# ### uncomment/comment and change the following lines for other configuration options @@ -44,38 +43,34 @@ ############################################################################################# #### languages #### ### disable C builds -#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF_BUILD_C:BOOL=OFF") +#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DH5EX_BUILD_C:BOOL=OFF") ### enable C++ builds -#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF_BUILD_CXX:BOOL=ON") +#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DH5EX_BUILD_CXX:BOOL=ON") ### enable Fortran builds -#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF_BUILD_FORTRAN:BOOL=ON") +#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DH5EX_BUILD_FORTRAN:BOOL=ON") ### enable JAVA builds -#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF_BUILD_JAVA:BOOL=ON") +#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DH5EX_BUILD_JAVA:BOOL=ON") ############################################################################################# ### enable FILTERS builds -#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} 
-DHDF_BUILD_FILTERS:BOOL=ON") +#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DH5EX_BUILD_FILTERS:BOOL=ON") ### default HDF5_PLUGIN_PATH to where the filter libraries are located #set(ENV{HDF5_PLUGIN_PATH} "${INSTALLDIR}/lib/plugin") ############################################################################################# ### enable parallel program builds -#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF_ENABLE_PARALLEL:BOOL=ON") +#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DH5EX_ENABLE_PARALLEL:BOOL=ON") ############################################################################################# ### match the hdf5 library namespace set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF5_NAMESPACE:STRING=hdf5::") -############################################################################################# -### enable threadsafe program builds -#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DHDF_ENABLE_THREADSAFE:BOOL=ON") - ############################################################################################# ### enable test program builds, requires reference files in testfiles subdirectory -#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DBUILD_TESTING:BOOL=ON") -#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DCOMPARE_TESTING:BOOL=ON") +#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DH5EX_BUILD_TESTING:BOOL=ON") +#set(ADD_BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DH5EX_COMPARE_TESTING:BOOL=ON") ############################################################################################# diff --git a/config/cmake/scripts/CTestScript.cmake b/config/cmake/scripts/CTestScript.cmake index ad6cd44014b..46037f573b1 100644 --- a/config/cmake/scripts/CTestScript.cmake +++ b/config/cmake/scripts/CTestScript.cmake @@ -51,15 +51,15 @@ endif () set (BUILD_OPTIONS "${ADD_BUILD_OPTIONS} -DSITE:STRING=${CTEST_SITE} -DBUILDNAME:STRING=${CTEST_BUILD_NAME}") # Launchers work only with Makefile and Ninja generators. 
-if(NOT "${CTEST_CMAKE_GENERATOR}" MATCHES "Make|Ninja" OR LOCAL_SKIP_TEST) - set(CTEST_USE_LAUNCHERS 0) - set(ENV{CTEST_USE_LAUNCHERS_DEFAULT} 0) - set(BUILD_OPTIONS "${BUILD_OPTIONS} -DCTEST_USE_LAUNCHERS:BOOL=OFF") -else() - set(CTEST_USE_LAUNCHERS 1) - set(ENV{CTEST_USE_LAUNCHERS_DEFAULT} 1) - set(BUILD_OPTIONS "${BUILD_OPTIONS} -DCTEST_USE_LAUNCHERS:BOOL=ON") -endif() +if (NOT "${CTEST_CMAKE_GENERATOR}" MATCHES "Make|Ninja" OR LOCAL_SKIP_TEST) + set (CTEST_USE_LAUNCHERS 0) + set (ENV{CTEST_USE_LAUNCHERS_DEFAULT} 0) + set (BUILD_OPTIONS "${BUILD_OPTIONS} -DCTEST_USE_LAUNCHERS:BOOL=OFF") +else () + set (CTEST_USE_LAUNCHERS 1) + set (ENV{CTEST_USE_LAUNCHERS_DEFAULT} 1) + set (BUILD_OPTIONS "${BUILD_OPTIONS} -DCTEST_USE_LAUNCHERS:BOOL=ON") +endif () #----------------------------------------------------------------------------- # MAC machines need special option @@ -206,7 +206,7 @@ endif () #----------------------------------------------------------------------------- ## -- set output to english -set ($ENV{LC_MESSAGES} "en_EN") +set (ENV{LC_MESSAGES} "en_EN") # Print summary information. foreach (v diff --git a/config/nvidia-flags b/config/nvidia-flags index c140edd9830..39bca831314 100644 --- a/config/nvidia-flags +++ b/config/nvidia-flags @@ -92,7 +92,7 @@ if test "X-nvc" = "X-$cc_vendor" -o "X-nvcc" = "X-$cc_vendor"; then ########### NO_SYMBOLS_CFLAGS="-s" - SYMBOLS_CFLAGS="-g" + SYMBOLS_CFLAGS="-gopt" ############# # Profiling # @@ -106,9 +106,8 @@ if test "X-nvc" = "X-$cc_vendor" -o "X-nvcc" = "X-$cc_vendor"; then # Optimization # ################ - HIGH_OPT_CFLAGS="-O1" # -O2+ currently has test failures. - #DEBUG_OPT_CFLAGS="-gopt -O2" - DEBUG_OPT_CFLAGS="-gopt -O1" # -O2+ currently has test failures. 
+ HIGH_OPT_CFLAGS="-O3" + DEBUG_OPT_CFLAGS="-O1" # -O0 can be very slow NO_OPT_CFLAGS="-O0" ################# diff --git a/config/toolchain/mingw64.cmake b/config/toolchain/mingw64.cmake index 1b138919087..d4d2e4e0532 100644 --- a/config/toolchain/mingw64.cmake +++ b/config/toolchain/mingw64.cmake @@ -1,4 +1,4 @@ -set(TOOLCHAIN_PREFIX x86_64-w64-mingw32) +set (TOOLCHAIN_PREFIX x86_64-w64-mingw32) set (CMAKE_SYSTEM_NAME Windows) set (CMAKE_C_COMPILER ${TOOLCHAIN_PREFIX}-gcc) set (CMAKE_CXX_COMPILER ${TOOLCHAIN_PREFIX}-g++) diff --git a/configure.ac b/configure.ac index 8b632073825..8226ede1e2a 100644 --- a/configure.ac +++ b/configure.ac @@ -3812,10 +3812,10 @@ AC_DEFINE_UNQUOTED([DEFAULT_PLUGINDIR], ["$default_plugindir"], ## for the speed optimization of hard conversions. Soft conversions can ## actually benefit little. ## -AC_MSG_CHECKING([whether exception handling functions is checked during data conversions]) +AC_MSG_CHECKING([whether exception handling functions are checked during data conversions]) AC_ARG_ENABLE([dconv-exception], [AS_HELP_STRING([--enable-dconv-exception], - [if exception handling functions is checked during + [Check exception handling functions during data conversions [default=yes]])], [DCONV_EXCEPTION=$enableval], [DCONV_EXCEPTION=yes]) @@ -3836,7 +3836,7 @@ fi AC_MSG_CHECKING([whether data accuracy is guaranteed during data conversions]) AC_ARG_ENABLE([dconv-accuracy], [AS_HELP_STRING([--enable-dconv-accuracy], - [if data accuracy is guaranteed during + [Guarantee data accuracy during data conversions [default=yes]])], [DATA_ACCURACY=$enableval], [DATA_ACCURACY=yes]) diff --git a/doc/parallel-compression.md b/doc/parallel-compression.md index ae05b2090bd..080845f64ad 100644 --- a/doc/parallel-compression.md +++ b/doc/parallel-compression.md @@ -64,9 +64,9 @@ H5Dwrite(..., dxpl_id, ...); The following are two simple examples of using the parallel compression feature: 
-[ph5_filtered_writes.c](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/HDF5Examples/C/H5PAR/ph5_filtered_writes.c) +[ph5_filtered_writes.c][u1] -[ph5_filtered_writes_no_sel.c](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/HDF5Examples/C/H5PAR/ph5_filtered_writes_no_sel.c) +[ph5_filtered_writes_no_sel.c][u2] The former contains simple examples of using the parallel compression feature to write to compressed datasets, while the @@ -79,7 +79,7 @@ participate in the collective write call. ## Multi-dataset I/O support The parallel compression feature is supported when using the -multi-dataset I/O API routines ([H5Dwrite_multi](https://hdfgroup.github.io/hdf5/v1_14/group___h5_d.html#gaf6213bf3a876c1741810037ff2bb85d8)/[H5Dread_multi](https://hdfgroup.github.io/hdf5/v1_14/group___h5_d.html#ga8eb1c838aff79a17de385d0707709915)), but the +multi-dataset I/O API routines ([H5Dwrite_multi][u3]/[H5Dread_multi][u4]), but the following should be kept in mind: - Parallel writes to filtered datasets **must** still be collective, @@ -99,7 +99,7 @@ following should be kept in mind: ## Incremental file space allocation support -HDF5's [file space allocation time](https://hdfgroup.github.io/hdf5/v1_14/group___d_c_p_l.html#ga85faefca58387bba409b65c470d7d851) +HDF5's [file space allocation time][u5] is a dataset creation property that can have significant effects on application performance, especially if the application uses parallel HDF5. In a serial HDF5 application, the default file space @@ -118,7 +118,7 @@ While this strategy has worked in the past, it has some noticeable drawbacks. For one, the larger the chunked dataset being created, the more noticeable overhead there will be during dataset creation as all of the data chunks are being allocated in the HDF5 file. 
-Further, these data chunks will, by default, be [filled](https://hdfgroup.github.io/hdf5/v1_14/group___d_c_p_l.html#ga4335bb45b35386daa837b4ff1b9cd4a4) +Further, these data chunks will, by default, be [filled][u6] with HDF5's default fill data value, leading to extraordinary dataset creation overhead and resulting in pre-filling large portions of a dataset that the application might have been planning @@ -126,12 +126,12 @@ to overwrite anyway. Even worse, there will be more initial overhead from compressing that fill data before writing it out, only to have it read back in, unfiltered and modified the first time a chunk is written to. In the past, it was typically suggested that parallel -HDF5 applications should use [H5Pset_fill_time](https://hdfgroup.github.io/hdf5/v1_14/group___d_c_p_l.html#ga6bd822266b31f86551a9a1d79601b6a2) +HDF5 applications should use [H5Pset_fill_time][u7] with a value of `H5D_FILL_TIME_NEVER` in order to disable writing of the fill value to dataset chunks, but this isn't ideal if the application actually wishes to make use of fill values. -With [improvements made](https://www.hdfgroup.org/2022/03/parallel-compression-improvements-in-hdf5-1-13-1/) +With [improvements made][u8] to the parallel compression feature for the HDF5 1.13.1 release, "incremental" file space allocation is now the default for datasets created in parallel *only if they have filters applied to them*. @@ -154,7 +154,7 @@ optimal performance out of the parallel compression feature. ### Begin with a good chunking strategy -[Starting with a good chunking strategy](https://portal.hdfgroup.org/documentation/hdf5-docs/chunking_in_hdf5.html) +[Starting with a good chunking strategy][u9] will generally have the largest impact on overall application performance. 
The different chunking parameters can be difficult to fine-tune, but it is essential to start with a well-performing @@ -166,7 +166,7 @@ chosen chunk size becomes a very important factor when compression is involved, as data chunks have to be completely read and re-written to perform partial writes to the chunk. -[Improving I/O performance with HDF5 compressed datasets](https://docs.hdfgroup.org/archive/support/HDF5/doc/TechNotes/TechNote-HDF5-ImprovingIOPerformanceCompressedDatasets.pdf) +[Improving I/O performance with HDF5 compressed datasets][u10] is a useful reference for more information on getting good performance when using a chunked dataset layout. @@ -220,14 +220,14 @@ chunks to end up at addresses in the file that do not align well with the underlying file system, possibly leading to poor performance. As an example, Lustre performance is generally good when writes are aligned with the chosen stripe size. -The HDF5 application can use [H5Pset_alignment](https://hdfgroup.github.io/hdf5/v1_14/group___f_a_p_l.html#gab99d5af749aeb3896fd9e3ceb273677a) +The HDF5 application can use [H5Pset_alignment][u11] to have a bit more control over where objects in the HDF5 file end up. However, do note that setting the alignment of objects generally wastes space in the file and has the potential to dramatically increase its resulting size, so caution should be used when choosing the alignment parameters. -[H5Pset_alignment](https://hdfgroup.github.io/hdf5/v1_14/group___f_a_p_l.html#gab99d5af749aeb3896fd9e3ceb273677a) +[H5Pset_alignment][u11] has two parameters that control the alignment of objects in the HDF5 file, the "threshold" value and the alignment value. The threshold value specifies that any object greater @@ -264,19 +264,19 @@ in a file, this can create significant amounts of free space in the file over its lifetime and eventually cause performance issues. 
-An HDF5 application can use [H5Pset_file_space_strategy](https://hdfgroup.github.io/hdf5/v1_14/group___f_c_p_l.html#ga167ff65f392ca3b7f1933b1cee1b9f70) +An HDF5 application can use [H5Pset_file_space_strategy][u12] with a value of `H5F_FSPACE_STRATEGY_PAGE` to enable the paged aggregation feature, which can accumulate metadata and raw data for dataset data chunks into well-aligned, configurably sized "pages" for better performance. However, note that using the paged aggregation feature will cause any setting from -[H5Pset_alignment](https://hdfgroup.github.io/hdf5/v1_14/group___f_a_p_l.html#gab99d5af749aeb3896fd9e3ceb273677a) +[H5Pset_alignment][u11] to be ignored. While an application should be able to get -comparable performance effects by [setting the size of these pages](https://hdfgroup.github.io/hdf5/v1_14/group___f_c_p_l.html#gad012d7f3c2f1e1999eb1770aae3a4963) to be equal to the value that -would have been set for [H5Pset_alignment](https://hdfgroup.github.io/hdf5/v1_14/group___f_a_p_l.html#gab99d5af749aeb3896fd9e3ceb273677a), +comparable performance effects by [setting the size of these pages][u13] +to be equal to the value that would have been set for [H5Pset_alignment][u11], this may not necessarily be the case and should be studied. -Note that [H5Pset_file_space_strategy](https://hdfgroup.github.io/hdf5/v1_14/group___f_c_p_l.html#ga167ff65f392ca3b7f1933b1cee1b9f70) +Note that [H5Pset_file_space_strategy][u12] has a `persist` parameter. This determines whether or not the file free space manager should include extra metadata in the HDF5 file about free space sections in the file. 
If this @@ -300,12 +300,12 @@ hid_t file_id = H5Fcreate("file.h5", H5F_ACC_TRUNC, fcpl_id, fapl_id); While the parallel compression feature requires that the HDF5 application set and maintain collective I/O at the application -interface level (via [H5Pset_dxpl_mpio](https://hdfgroup.github.io/hdf5/v1_14/group___d_x_p_l.html#ga001a22b64f60b815abf5de8b4776f09e)), +interface level (via [H5Pset_dxpl_mpio][u14]), it does not require that the actual MPI I/O that occurs at the lowest layers of HDF5 be collective; independent I/O may perform better depending on the application I/O patterns and parallel file system performance, among other factors. The -application may use [H5Pset_dxpl_mpio_collective_opt](https://hdfgroup.github.io/hdf5/v1_14/group___d_x_p_l.html#gacb30d14d1791ec7ff9ee73aa148a51a3) +application may use [H5Pset_dxpl_mpio_collective_opt][u15] to control this setting and see which I/O method provides the best performance. @@ -318,7 +318,7 @@ H5Dwrite(..., dxpl_id, ...); ### Runtime HDF5 Library version -An HDF5 application can use the [H5Pset_libver_bounds](https://hdfgroup.github.io/hdf5/v1_14/group___f_a_p_l.html#gacbe1724e7f70cd17ed687417a1d2a910) +An HDF5 application can use the [H5Pset_libver_bounds][u16] routine to set the upper and lower bounds on library versions to use when creating HDF5 objects. For parallel compression specifically, setting the library version to the latest available @@ -332,3 +332,20 @@ H5Pset_libver_bounds(fapl_id, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST); hid_t file_id = H5Fcreate("file.h5", H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id); ... 
``` + +[u1]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/HDF5Examples/C/H5PAR/ph5_filtered_writes.c +[u2]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/HDF5Examples/C/H5PAR/ph5_filtered_writes_no_sel.c +[u3]: https://hdfgroup.github.io/hdf5/develop/group___h5_d.html#gaf6213bf3a876c1741810037ff2bb85d8 +[u4]: https://hdfgroup.github.io/hdf5/develop/group___h5_d.html#ga8eb1c838aff79a17de385d0707709915 +[u5]: https://hdfgroup.github.io/hdf5/develop/group___d_c_p_l.html#ga85faefca58387bba409b65c470d7d851 +[u6]: https://hdfgroup.github.io/hdf5/develop/group___d_c_p_l.html#ga4335bb45b35386daa837b4ff1b9cd4a4 +[u7]: https://hdfgroup.github.io/hdf5/develop/group___d_c_p_l.html#ga6bd822266b31f86551a9a1d79601b6a2 +[u8]: https://support.hdfgroup.org/documentation/hdf5/parallel-compression-improvements-in-hdf5-1-13-1 +[u9]: https://support.hdfgroup.org/documentation/hdf5/chunking_in_hdf5.html +[u10]: https://support.hdfgroup.org/documentation/hdf5/technotes/TechNote-HDF5-ImprovingIOPerformanceCompressedDatasets.pdf +[u11]: https://hdfgroup.github.io/hdf5/develop/group___f_a_p_l.html#gab99d5af749aeb3896fd9e3ceb273677a +[u12]: https://hdfgroup.github.io/hdf5/develop/group___f_c_p_l.html#ga167ff65f392ca3b7f1933b1cee1b9f70 +[u13]: https://hdfgroup.github.io/hdf5/develop/group___f_c_p_l.html#gad012d7f3c2f1e1999eb1770aae3a4963 +[u14]: https://hdfgroup.github.io/hdf5/develop/group___d_x_p_l.html#ga001a22b64f60b815abf5de8b4776f09e +[u15]: https://hdfgroup.github.io/hdf5/develop/group___d_x_p_l.html#gacb30d14d1791ec7ff9ee73aa148a51a3 +[u16]: https://hdfgroup.github.io/hdf5/develop/group___f_a_p_l.html#gacbe1724e7f70cd17ed687417a1d2a910 diff --git a/doxygen/aliases b/doxygen/aliases index 43a7d64eee2..f13c92429fb 100644 --- a/doxygen/aliases +++ b/doxygen/aliases @@ -4,17 +4,16 @@ ALIASES += THG="The HDF Group" # Default URLs (Note that md files do not use any aliases) ################################################################################ # Default URL for HDF Group Files 
-ALIASES += HDFURL="docs.hdfgroup.org/hdf5" +ALIASES += HDFURL="support.hdfgroup.org" # URL for archived files -ALIASES += ARCURL="docs.hdfgroup.org/archive/support/HDF5/doc" -# URL for RFCs -ALIASES += RFCURL="docs.hdfgroup.org/hdf5/rfc" +ALIASES += ARCURL="\HDFURL/archive/support/HDF5/doc" # URL for documentation -ALIASES += DSPURL="portal.hdfgroup.org/display/HDF5" -ALIASES += DOCURL="portal.hdfgroup.org/documentation/hdf5-docs" +ALIASES += DOCURL="\HDFURL/releases/hdf5/documentation" # URL for downloads -ALIASES += DWNURL="portal.hdfgroup.org/downloads" -ALIASES += AEXURL="support.hdfgroup.org/ftp/HDF5/examples" +ALIASES += DWNURL="\HDFURL/releases/hdf5/downloads" +# URL for RFCs +ALIASES += RFCURL="\DOCURL/rfc" +ALIASES += AEXURL="\HDFURL/archive/support/ftp/HDF5/examples" # doxygen subdir (develop, v1_14) ALIASES += DOXURL="hdfgroup.github.io/hdf5/v1_14" #branch name (develop, hdf5_1_14) @@ -259,13 +258,13 @@ ALIASES += sa_metadata_ops="\sa \li H5Pget_all_coll_metadata_ops() \li H5Pget_co ALIASES += ref_cons_semantics="Enabling a Strict Consistency Semantics Model in Parallel HDF5" ALIASES += ref_file_image_ops="HDF5 File Image Operations" -ALIASES += ref_filter_pipe="Data Flow Pipeline for H5Dread()" +ALIASES += ref_filter_pipe="Data Flow Pipeline for H5Dread()" ALIASES += ref_group_impls="Group implementations in HDF5" ALIASES += ref_h5lib_relver="HDF5 Library Release Version Numbers" -ALIASES += ref_mdc_in_hdf5="Metadata Caching in HDF5" -ALIASES += ref_mdc_logging="Metadata Cache Logging" +ALIASES += ref_mdc_in_hdf5="Metadata Caching in HDF5" +ALIASES += ref_mdc_logging="Metadata Cache Logging" ALIASES += ref_news_112="New Features in HDF5 Release 1.12" -ALIASES += ref_h5ocopy="Copying Committed Datatypes with H5Ocopy()" +ALIASES += ref_h5ocopy="Copying Committed Datatypes with H5Ocopy()" ALIASES += ref_sencode_fmt_change="RFC H5Sencode() / H5Sdecode() Format Change" ALIASES += ref_vlen_strings="\Emph{Creating variable-length string datatypes}" ALIASES += 
ref_vol_doc="VOL documentation" diff --git a/doxygen/dox/About.dox b/doxygen/dox/About.dox index 120156eef71..73010b0c3de 100644 --- a/doxygen/dox/About.dox +++ b/doxygen/dox/About.dox @@ -83,7 +83,7 @@ as a general reference. All custom commands for this project are located in the aliases -file in the doxygen +file in the doxygen subdirectory of the main HDF5 repo. The custom commands are grouped in sections. Find a suitable section for your command or diff --git a/doxygen/dox/CollectiveCalls.dox b/doxygen/dox/CollectiveCalls.dox new file mode 100644 index 00000000000..9f26896262b --- /dev/null +++ b/doxygen/dox/CollectiveCalls.dox @@ -0,0 +1,1265 @@ +/** \page collective_calls Collective Calling Requirements in Parallel HDF5 Applications + * + * \section sec_collective_calls_intro Introduction + * This document addresses two topics of concern + in a parallel computing environment: + + + The term @ref options in the "Additional notes" + column indicates that the first item in the "Function" + column of the same row is a macro that is selectively mapped to one + of the two immediately-following functions. + For example, #H5Acreate is a macro that can be mapped to + either #H5Acreate1 or #H5Acreate2. + This mapping is configurable and is explained in + \ref api-compat-macros. + The macro structure was introduced at HDF5 Release 1.8.0. + * + * \section sec_collective_calls_func Always collective + * The following functions must always be called collectively. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ API + + Function + + All processes: +
+ same datatype & dataspace +
+ All processes: +
+ same access properties +
+ All processes: +
+ same creation properties +
+ Available in releases since + + Additional notes +
+ H5A + + #H5Acreate +
+ #H5Acreate1 +
+ #H5Acreate2 +
+ X + + X + + X + + 1.8.x + + @ref options +
+ The function #H5Acreate was renamed to + #H5Acreate1 at Release 1.8.0. +
+ + #H5Acreate_by_name + + X + + X + + X + + 1.8.x + +
+ + #H5Adelete + + + + + +
+ + #H5Adelete_by_idx + + + + + 1.8.x + +
+ + #H5Adelete_by_name + + + + + 1.8.x + +
+ + #H5Arename + + + + + 1.6.x + +
+ + #H5Arename_by_name + + + + + 1.8.x + +
+ + #H5Awrite + + + + + + Because raw data for an attribute is cached locally, + all processes must participate in order to guarantee that + future #H5Aread calls return correct results + on all processes. +
+
+ H5D + + #H5Dcreate +
+ #H5Dcreate1 +
+ #H5Dcreate2 +
+ X + + X + + X + + 1.8.x + + @ref options +
+ The function #H5Dcreate was renamed to + #H5Dcreate1 at Release 1.8.0. +
+ + #H5Dcreate_anon + + X + + X + + X + + 1.8.x + +
+ + #H5Dextend + + + + + + All processes must participate only if the number of chunks + in the dataset actually changes. +
+ All processes must use the same dataspace dimensions. +
+ + #H5Dset_extent + + + + + 1.6.x + + All processes must participate only if the number of chunks + in the dataset actually changes. +
+ All processes must use the same dataspace dimensions. +
+
+ H5F + + #H5Fclose + + + + + + All processes must participate only if this is the + last reference to the file identifier. +
+ + #H5Fcreate + + + X + + X + + +
+ + #H5Fflush + + + + + +
+ + #H5Fmount + + + + + +
+ + #H5Fopen + + + X + + + +
+ + #H5Freopen + + + + + +
+ + #H5Funmount + + + + + +
+
+ H5G + + #H5Gcreate +
+ #H5Gcreate1 +
+ #H5Gcreate2 +
+ + X + + X + + 1.8.x + + @ref options +
+ The function #H5Gcreate was renamed to + #H5Gcreate1 at Release 1.8.0. +
+ + #H5Gcreate_anon + + + X + + X + + 1.8.x + +
+ + #H5Glink + + + + + +
+ + #H5Glink2 + + + + + 1.6.x + +
+ + #H5Gmove + + + + + +
+ + #H5Gmove2 + + + + + 1.6.x + +
+ + #H5Gset_comment + + + + + +
+ + #H5Gunlink + + + + + +
+
+ H5I + + #H5Idec_ref + + + + + 1.6.x + + This function may be called independently if the object identifier + does not refer to an object that was collectively opened. +
+ + #H5Iinc_ref + + + + + 1.6.x + + This function may be called independently if the object identifier + does not refer to an object that was collectively opened. +
+
+ H5L + + #H5Lcopy + + + + + 1.8.x + +
+ + #H5Lcreate_external + + + + X + + 1.8.x + +
+ + #H5Lcreate_hard + + + + X + + 1.8.x + +
+ + #H5Lcreate_soft + + + + X + + 1.8.x + +
+ + #H5Lcreate_ud + + + + X + + 1.8.x + +
+ + #H5Ldelete + + + + + 1.8.x + +
+ + #H5Ldelete_by_idx + + + + + 1.8.x + +
+ + #H5Lmove + + + + + 1.8.x + +
+
+ H5O + + #H5Ocopy + + + + + 1.8.x + +
+ + #H5Odecr_refcount + + + + + 1.8.x + +
+ + #H5Oincr_refcount + + + + + 1.8.x + +
+ + #H5Olink + + + + + 1.8.x + +
+ + #H5Oset_comment + + + + + 1.8.x + +
+ + #H5Oset_comment_by_name + + + + + 1.8.x + +
+
+ H5R + + #H5Rcreate + + + + + +
+
+ H5T + + #H5Tcommit +
+ #H5Tcommit1 +
+ #H5Tcommit2 +
+ + X + + X + + 1.8.x + + @ref options +
+ The function #H5Tcommit was renamed to + #H5Tcommit1 at Release 1.8.0. +
+ + #H5Tcommit_anon + + + X + + X + + 1.8.x + +
+ * + * \section sec_collective_calls_nomod Collective, unless target object will not be modified + * The following functions must normally be called collectively. + * If, however, the target object will not be modified, + * they may be called independently. + * + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ API + + Function + + All processes: +
+ same datatype & dataspace +
+ All processes: +
+ same access properties +
+ All processes: +
+ same creation properties +
+ Available in releases since + + Additional notes +
+ H5A + + #H5Aclose + + + + + + All processes must participate only if + all file identifiers for a file have been closed and + this is the last outstanding object identifier. +
+ + #H5Aopen + + + X + + + 1.8.x + +
+ + #H5Aopen_by_idx + + + X + + + 1.8.x + +
+ + #H5Aopen_by_name + + + X + + + 1.8.x + +
+ + #H5Aopen_idx + + + X + + + +
+ + #H5Aopen_name + + + X + + + +
+
+ H5D + + #H5Dclose + + + + + + All processes must participate only if + all file identifiers for a file have been closed and + this is the last outstanding object identifier. +
+ + #H5Dopen +
+ #H5Dopen1 +
+ #H5Dopen2 +
+ + X + + + 1.8.x + + @ref options +
+ The function #H5Dopen was renamed to + #H5Dopen1 at Release 1.8.0. +
+
+ H5G + + #H5Gclose + + + + + + All processes must participate only if + all file identifiers for a file have been closed and + this is the last outstanding object identifier. +
+ + #H5Gopen +
+ #H5Gopen1 +
+ #H5Gopen2 +
+ + X + + + 1.8.x + + @ref options +
+ The function #H5Gopen was renamed to + #H5Gopen1 at Release 1.8.0. +
+
+ H5I + + #H5Iget_file_id + + + + + 1.8.x + +
+
+ H5O + + #H5Oclose + + + + + 1.8.x + + All processes must participate only if + all file identifiers for a file have been closed and + this is the last outstanding object identifier. +
+ + #H5Oopen + + + X + + + 1.8.x + +
+ + #H5Oopen_by_addr + + + X + + + 1.8.x + +
+ + #H5Oopen_by_idx + + + X + + + 1.8.x + +
+
+ H5R + + #H5Rdereference + + + + + +
+
+ H5T + + #H5Tclose + + + + + + All processes must participate only if + the datatype is for a committed datatype, + all the file identifiers for the file have been closed, and + this is the last outstanding object identifier. +
+ + #H5Topen +
+ #H5Topen1 +
+ #H5Topen2 +
+ + X + + + 1.8.x + + @ref options +
+ The function #H5Topen was renamed to + #H5Topen1 at Release 1.8.0. +
+ * + * \section sec_collective_calls_props Properties + * The following properties must be set to the same values + * for an object or link in all cases where the object or link is accessed + * in a parallel program. + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ Function + + Available in releases since +
+ Dataset creation properties +
+ #H5Pmodify_filter + + 1.8.x +
+ #H5Premove_filter + + 1.8.x +
+ #H5Pset_alloc_time + +
+ #H5Pset_chunk + +
+ #H5Pset_external + +
+ #H5Pset_fill_time + +
+ #H5Pset_fill_value + +
+ #H5Pset_filter + +
+ #H5Pset_fletcher32 + + 1.8.x +
+ #H5Pset_layout + +
+ #H5Pset_nbit + + 1.8.x +
+ #H5Pset_shuffle + +
+ #H5Pset_szip + +
+
+ Dataset transfer properties + +
+ #H5Pset_btree_ratios + +
+ #H5Pset_buffer + +
+ #H5Pset_dxpl_mpio + +
+ #H5Pset_preserve + +
+
+ File access properties + +
+ #H5Pset_alignment + +
+ #H5Pset_cache + +
+ #H5Pset_fapl_mpio + +
+ #H5Pset_fclose_degree + +
+ #H5Pset_gc_references + +
+ #H5Fset_latest_format + + 1.8.x +
+ #H5Pset_libver_bounds + + 1.8.x +
+ #H5Pset_mdc_config + +
+ #H5Pset_meta_block_size + +
+ #H5Pset_small_data_block_size + +
+ #H5Pset_sieve_buf_size + +
+
+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ Function + + Available in releases since +
+ File creation properties + +
+ #H5Pset_istore_k + +
+ #H5Pset_shared_mesg_index + + 1.8.x +
+ #H5Pset_shared_mesg_nindexes + + 1.8.x +
+ #H5Pset_shared_mesg_phase_change + + 1.8.x +
+ #H5Pset_sizes + +
+ #H5Pset_sym_k + +
+ #H5Pset_userblock + +
+
+ Group creation properties + +
+ #H5Pset_est_link_info + + 1.8.x +
+ #H5Pset_link_creation_order + + 1.8.x +
+ #H5Pset_link_phase_change + + 1.8.x +
+ #H5Pset_local_heap_size_hint + + 1.8.x +
+
+ Link creation properties + +
+ #H5Pset_char_encoding + + 1.8.x +
+ #H5Pset_create_intermediate_group + + 1.8.x +
+
+ Object creation properties + +
+ #H5Pset_attr_phase_change + + 1.8.x +
+ #H5Pset_attr_creation_order + + 1.8.x +
+ #H5Pset_obj_track_times + + 1.8.x +
+
+ Object copy properties + +
+ #H5Pset_copy_object + + 1.8.x +
+
+ + */ diff --git a/doxygen/dox/DDLBNF110.dox b/doxygen/dox/DDLBNF110.dox index 6d6b67ef7fd..b392526417a 100644 --- a/doxygen/dox/DDLBNF110.dox +++ b/doxygen/dox/DDLBNF110.dox @@ -1,7 +1,5 @@ /** \page DDLBNF110 DDL in BNF through HDF5 1.10 -\todo Revise this & break it up! - \section intro110 Introduction This document contains the data description language (DDL) for an HDF5 file. The diff --git a/doxygen/dox/DDLBNF112.dox b/doxygen/dox/DDLBNF112.dox index cfe34c321f9..c6463c23d5c 100644 --- a/doxygen/dox/DDLBNF112.dox +++ b/doxygen/dox/DDLBNF112.dox @@ -1,7 +1,5 @@ /** \page DDLBNF112 DDL in BNF for HDF5 1.12 through HDF5 1.14.3 -\todo Revise this & break it up! - \section intro112 Introduction This document contains the data description language (DDL) for an HDF5 file. The diff --git a/doxygen/dox/DDLBNF114.dox b/doxygen/dox/DDLBNF114.dox index 61e9157e560..baa7a57fea6 100644 --- a/doxygen/dox/DDLBNF114.dox +++ b/doxygen/dox/DDLBNF114.dox @@ -1,7 +1,5 @@ /** \page DDLBNF114 DDL in BNF for HDF5 1.14.4 and above -\todo Revise this & break it up! - \section intro114 Introduction This document contains the data description language (DDL) for an HDF5 file. 
The diff --git a/doxygen/dox/ExamplesAPI.dox b/doxygen/dox/ExamplesAPI.dox index d4f50a5c8a8..8a5eca374df 100644 --- a/doxygen/dox/ExamplesAPI.dox +++ b/doxygen/dox/ExamplesAPI.dox @@ -30,7 +30,7 @@ Languages are C, Fortran, Java (JHI5), Java Object Package, Python (High Level), C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_alloc.h5 @@ -43,7 +43,7 @@ Languages are C, Fortran, Java (JHI5), Java Object Package, Python (High Level), C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_checksum.h5 @@ -56,7 +56,7 @@ Languages are C, Fortran, Java (JHI5), Java Object Package, Python (High Level), C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_chunk.h5 @@ -69,7 +69,7 @@ Languages are C, Fortran, Java (JHI5), Java Object Package, Python (High Level), C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_compact.h5 @@ -82,7 +82,7 @@ Languages are C, Fortran, Java (JHI5), Java Object Package, Python (High Level), C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_extern.h5 @@ -95,7 +95,7 @@ Languages are C, Fortran, Java (JHI5), Java Object Package, Python (High Level), C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_fillval.h5 @@ -108,7 +108,7 @@ Languages are C, Fortran, Java (JHI5), Java Object Package, Python (High Level), C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_gzip.h5 @@ -121,7 +121,7 @@ Languages are C, Fortran, Java (JHI5), Java Object Package, Python (High Level), C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_hyper.h5 @@ -134,7 +134,7 @@ Languages are C, Fortran, Java (JHI5), Java Object Package, Python (High Level), C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_nbit.h5 @@ -147,7 +147,7 @@ Languages are C, Fortran, Java (JHI5), Java Object Package, Python (High Level), C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_rdwrc.h5 @@ -160,7 +160,7 @@ Languages are C, Fortran, Java (JHI5), Java Object Package, Python (High Level), C FORTRAN Java -JavaObj 
+JavaObj MATLAB PyHigh PyLow h5ex_d_shuffle.h5 @@ -173,7 +173,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_sofloat.h5 @@ -186,7 +186,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_soint.h5 @@ -199,7 +199,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_szip.h5 @@ -212,7 +212,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_transform.h5 @@ -225,7 +225,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_unlimadd.h5 @@ -238,7 +238,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_unlimgzip.h5 @@ -251,7 +251,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_d_unlimmod.h5 @@ -275,7 +275,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_g_compact.h5 @@ -289,7 +289,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_g_corder.h5 @@ -302,7 +302,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_g_create.h5 @@ -315,7 +315,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_g_intermediate.h5 @@ -328,7 +328,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_g_iterate.h5 @@ -341,7 +341,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_g_phase.h5 @@ -366,7 +366,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_g_visit.h5 @@ -388,9 +388,9 @@ FORTRAN Read / Write Array (Attribute) C -FORTRAN +FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_arrayatt.h5 @@ -401,9 +401,9 @@ FORTRAN Read / Write Array (Dataset) C -FORTRAN +FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_array.h5 @@ -414,9 +414,9 @@ FORTRAN Read / Write Bitfield (Attribute) C -FORTRAN +FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_bitatt.h5 @@ -427,9 +427,9 @@ FORTRAN Read / Write Bitfield (Dataset) C -FORTRAN +FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_bit.h5 @@ -440,9 
+440,9 @@ FORTRAN Read / Write Compound (Attribute) C -FORTRAN +FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_cmpdatt.h5 @@ -453,9 +453,9 @@ FORTRAN Read / Write Compound (Dataset) C -FORTRAN +FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_cmpd.h5 @@ -468,7 +468,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_commit.h5 @@ -533,7 +533,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_floatatt.h5 @@ -546,7 +546,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_float.h5 @@ -559,7 +559,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_intatt.h5 @@ -572,7 +572,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_int.h5 @@ -585,7 +585,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_objrefatt.h5 @@ -598,7 +598,7 @@ FORTRAN C FORTRAN Java - JavaObj + JavaObj MATLAB PyHigh PyLow h5ex_t_objref.h5 @@ -611,7 +611,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_opaqueatt.h5 @@ -624,7 +624,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_opaque.h5 @@ -637,7 +637,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_regrefatt.h5 @@ -650,7 +650,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_regref.h5 @@ -661,9 +661,9 @@ FORTRAN Read / Write String (Attribute) C -FORTRAN +FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_stringatt.h5 @@ -676,7 +676,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_string.h5 @@ -709,8 +709,7 @@ FORTRAN Read / Write Variable Length String (Attribute) C -FORTRAN - Java JavaObj MATLAB PyHigh PyLow + FORTRAN Java JavaObj MATLAB PyHigh PyLow h5ex_t_vlstringatt.h5 h5ex_t_vlstringatt.tst @@ -722,7 +721,7 @@ FORTRAN C FORTRAN Java -JavaObj +JavaObj MATLAB PyHigh PyLow h5ex_t_vlstring.h5 @@ -843,7 +842,7 @@ FORTRAN Create/Read/Write an Attribute Java -JavaObj +JavaObj 
HDF5AttributeCreate.txt @@ -851,7 +850,7 @@ FORTRAN Create Datasets Java -JavaObj +JavaObj HDF5DatasetCreate.txt @@ -859,7 +858,7 @@ FORTRAN Read/Write Datasets Java -JavaObj +JavaObj HDF5DatasetRead.txt @@ -867,7 +866,7 @@ FORTRAN Create an Empty File Java -JavaObj +JavaObj HDF5FileCreate.txt @@ -883,9 +882,9 @@ FORTRAN Create Groups Java -JavaObj +JavaObj -HDF5GroupCreate.txt +HDF5GroupCreate.txt Select a Subset of a Dataset @@ -899,9 +898,9 @@ FORTRAN Create Two Datasets Within Groups Java -JavaObj +JavaObj -HDF5GroupDatasetCreate.txt +HDF5GroupDatasetCreate.txt @@ -918,7 +917,7 @@ FORTRAN Creating and Accessing a File C -FORTRAN +FORTRAN MATLAB PyHigh PyLow ph5_.h5 @@ -928,7 +927,7 @@ FORTRAN Creating and Accessing a Dataset C -FORTRAN +FORTRAN MATLAB PyHigh PyLow ph5_.h5 @@ -938,7 +937,7 @@ FORTRAN Writing and Reading Contiguous Hyperslabs C -FORTRAN +FORTRAN MATLAB PyHigh PyLow ph5_.h5 @@ -948,7 +947,7 @@ FORTRAN Writing and Reading Regularly Spaced Data Hyperslabs C -FORTRAN +FORTRAN MATLAB PyHigh PyLow ph5_.h5 @@ -958,7 +957,7 @@ FORTRAN Writing and Reading Pattern Hyperslabs C -FORTRAN +FORTRAN MATLAB PyHigh PyLow ph5_.h5 @@ -968,7 +967,7 @@ FORTRAN Writing and Reading Chunk Hyperslabs C -FORTRAN +FORTRAN MATLAB PyHigh PyLow ph5_.h5 @@ -978,7 +977,8 @@ FORTRAN Using the Subfiling VFD to Write a File Striped Across Multiple Subfiles C - FORTRAN MATLAB PyHigh PyLow +FORTRAN + MATLAB PyHigh PyLow ph5_.h5 ph5_.tst @@ -996,7 +996,8 @@ FORTRAN Collectively Write Datasets with Filters and Not All Ranks have Data C - FORTRAN MATLAB PyHigh PyLow +FORTRAN + MATLAB PyHigh PyLow ph5_.h5 ph5_.tst diff --git a/doxygen/dox/GettingStarted.dox b/doxygen/dox/GettingStarted.dox index aa81ca28744..274598c9537 100644 --- a/doxygen/dox/GettingStarted.dox +++ b/doxygen/dox/GettingStarted.dox @@ -38,7 +38,7 @@ Step by step instructions for learning HDF5 that include programming examples \subsection subsec_learn_tutor The HDF Group Tutorials and Examples These tutorials and 
examples are available for learning about the HDF5 High Level APIs, tools, -Parallel HDF5, and the HDF5-1.10 VDS and SWMR new features: +Parallel HDF5, and the VDS and SWMR features: - @@ -91,7 +91,7 @@ These examples (C, C++, Fortran, Java, Python) are provided in the HDF5 source c - @@ -107,7 +107,7 @@ These examples (C, C++, Fortran, Java, Python) are provided in the HDF5 source c - @@ -131,7 +131,7 @@ These examples (C, C++, Fortran, Java, Python) are provided in the HDF5 source c - diff --git a/doxygen/dox/LearnBasics3.dox b/doxygen/dox/LearnBasics3.dox index 3e9dd8ea090..d853c83d742 100644 --- a/doxygen/dox/LearnBasics3.dox +++ b/doxygen/dox/LearnBasics3.dox @@ -183,7 +183,7 @@ to a new with a new layout. \section secLBDsetLayoutSource Sources of Information Chunking in HDF5 (See the documentation on Advanced Topics in HDF5) -\see \ref sec_plist in the HDF5 \ref UG. +see \ref sec_plist in the HDF5 \ref UG.
Previous Chapter \ref LBPropsList - Next Chapter \ref LBExtDset @@ -251,7 +251,7 @@ The following operations are required in order to create a compressed dataset: \li Create the dataset. \li Close the dataset creation property list and dataset. -For more information on compression, see the FAQ question on Using Compression in HDF5. +For more information on compression, see the FAQ question on Using Compression in HDF5. \section secLBComDsetProg Programming Example @@ -720,7 +720,7 @@ Previous Chapter \ref LBQuiz - Next Chapter \ref LBCompiling Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics -/** @page LBCompiling Compiling HDF5 Applications +@page LBCompiling Compiling HDF5 Applications Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics
@@ -969,13 +969,13 @@ or on WINDOWS you may need to add the path to the bin folder to PATH. \subsection subsecLBCompilingCMakeScripts CMake Scripts for Building Applications Simple scripts are provided for building applications with different languages and options. -See CMake Scripts for Building Applications. +See CMake Scripts for Building Applications. For a more complete script (and to help resolve issues) see the script provided with the HDF5 Examples project. \subsection subsecLBCompilingCMakeExamples HDF5 Examples The installed HDF5 can be verified by compiling the HDF5 Examples project, included with the CMake built HDF5 binaries -in the share folder or you can go to the HDF5 Examples github repository. +in the share folder or you can go to the HDF5 Examples in the HDF5 github repository. Go into the share directory and follow the instructions in USING_CMake_examples.txt to build the examples. @@ -1035,9 +1035,11 @@ Previous Chapter \ref LBQuizAnswers - Next Chapter \ref LBTraining Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics -*/ +@page LBTraining Training Videos + +Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics -/ref LBTraining +Training Videos
Navigate back: \ref index "Main" / \ref GettingStarted / \ref LearnBasics diff --git a/doxygen/dox/LearnHDFView.dox b/doxygen/dox/LearnHDFView.dox index 2f0a0782e60..cfe11e19137 100644 --- a/doxygen/dox/LearnHDFView.dox +++ b/doxygen/dox/LearnHDFView.dox @@ -7,7 +7,7 @@ This tutorial enables you to get a feel for HDF5 by using the HDFView browser. I any programming experience. \section sec_learn_hv_install HDFView Installation -\li Download and install HDFView. It can be downloaded from the Download HDFView page. +\li Download and install HDFView. It can be downloaded from the Download HDFView page. \li Obtain the storm1.txt text file, used in the tutorial. \section sec_learn_hv_begin Begin Tutorial @@ -246,7 +246,7 @@ in the file). Please note that the chunk sizes used in this topic are for demonstration purposes only. For information on chunking and specifying an appropriate chunk size, see the -Chunking in HDF5 documentation. +Chunking in HDF5 documentation. Also see the HDF5 Tutorial topic on \ref secLBComDsetCreate.
@@ -68,7 +68,7 @@ A brief introduction to Parallel HDF5. If you are new to HDF5 please see the @re
-HDF5-1.10 New Features +New Features since HDF5-1.10 \li \ref VDS diff --git a/doxygen/dox/IntroHDF5.dox b/doxygen/dox/IntroHDF5.dox index 28c419b0c3b..6f3938ed8b2 100644 --- a/doxygen/dox/IntroHDF5.dox +++ b/doxygen/dox/IntroHDF5.dox @@ -262,7 +262,7 @@ FORTRAN routines are similar; they begin with “h5*” and end with “_f”.
  • Java routines are similar; the routine names begin with “H5*” and are prefixed with “H5.” as the class. Constants are in the HDF5Constants class and are prefixed with "HDF5Constants.". The function arguments -are usually similar, @see @ref HDF5LIB +are usually similar, see @ref HDF5LIB
  • For example: @@ -616,8 +616,8 @@ on the HDF-EOS Tools and Information Center pag \section secHDF5Examples Examples \li \ref LBExamples \li \ref ExAPI -\li Examples in the Source Code -\li Other Examples +\li Examples in the Source Code +\li Other Examples \section secHDF5ExamplesCompile How To Compile For information on compiling in C, C++ and Fortran, see: \ref LBCompiling diff --git a/doxygen/dox/IntroParHDF5.dox b/doxygen/dox/IntroParHDF5.dox index b8785d43c9d..58a6e7958b0 100644 --- a/doxygen/dox/IntroParHDF5.dox +++ b/doxygen/dox/IntroParHDF5.dox @@ -96,6 +96,8 @@ Once a file is opened by the processes of a communicator: \li Multiple processes write to the same dataset. \li Each process writes to an individual dataset. +@see \ref collective_calls + Please refer to the Supported Configuration Features Summary in the release notes for the current release of HDF5 for an up-to-date list of the platforms that we support Parallel HDF5 on. diff --git a/doxygen/dox/LearnBasics.dox b/doxygen/dox/LearnBasics.dox index ed83b367b6b..4db515c1a57 100644 --- a/doxygen/dox/LearnBasics.dox +++ b/doxygen/dox/LearnBasics.dox @@ -59,7 +59,7 @@ These examples (C, C++, Fortran, Java, Python) are provided in the HDF5 source c
    Create a file C Fortran C++ Java Python +C Fortran C++ Java Python
    Create a group C Fortran C++ Java Python +C Fortran C++ Java Python
    Create datasets in a group C Fortran C++ Java Python +C Fortran C++ Java Python
    Create a chunked and compressed dataset C Fortran C++ Java Python +C Fortran C++ Java Python
    - - - - - - - - - - - - - - - - - - -
    Values for H5Z_filter_tDescription
    0-255These values are reserved for filters predefined and - registered by the HDF5 library and of use to the general - public. They are described in a separate section - below.
    256-511Filter numbers in this range are used for testing only - and can be used temporarily by any organization. No - attempt is made to resolve numbering conflicts since all - definitions are by nature temporary.
    512-65535Reserved for future assignment. Please contact the - HDF5 development team - to reserve a value or range of values for - use by your filters.
    - -

    Defining and Querying the Filter Pipeline

    - -

    Two types of filters can be applied to raw data I/O: permanent - filters and transient filters. The permanent filter pipeline is - defined when the dataset is created while the transient pipeline - is defined for each I/O operation. During an - H5Dwrite() the transient filters are applied first - in the order defined and then the permanent filters are applied - in the order defined. For an H5Dread() the - opposite order is used: permanent filters in reverse order, then - transient filters in reverse order. An H5Dread() - must result in the same amount of data for a chunk as the - original H5Dwrite(). - -

    The permanent filter pipeline is defined by calling - H5Pset_filter() for a dataset creation property - list while the transient filter pipeline is defined by calling - that function for a dataset transfer property list. - -

    -
    herr_t H5Pset_filter (hid_t plist, - H5Z_filter_t filter, unsigned int flags, - size_t cd_nelmts, const unsigned int - cd_values[]) -
    This function adds the specified filter and - corresponding properties to the end of the transient or - permanent output filter pipeline (depending on whether - plist is a dataset creation or dataset transfer - property list). The flags argument specifies certain - general properties of the filter and is documented below. The - cd_values is an array of cd_nelmts integers - which are auxiliary data for the filter. The integer values - will be stored in the dataset object header as part of the - filter information. -
    int H5Pget_nfilters (hid_t plist) -
    This function returns the number of filters defined in the - permanent or transient filter pipeline depending on whether - plist is a dataset creation or dataset transfer - property list. In each pipeline the filters are numbered from - 0 through N-1 where N is the value returned - by this function. During output to the file the filters of a - pipeline are applied in increasing order (the inverse is true - for input). Zero is returned if there are no filters in the - pipeline and a negative value is returned for errors. -
    H5Z_filter_t H5Pget_filter (hid_t plist, - int filter_number, unsigned int *flags, - size_t *cd_nelmts, unsigned int - *cd_values, size_t namelen, char name[]) -
    This is the query counterpart of - H5Pset_filter() and returns information about a - particular filter number in a permanent or transient pipeline - depending on whether plist is a dataset creation or - dataset transfer property list. On input, cd_nelmts - indicates the number of entries in the cd_values - array allocated by the caller while on exit it contains the - number of values defined by the filter. The - filter_number should be a value between zero and - N-1 as described for H5Pget_nfilters() - and the function will return failure (a negative value) if the - filter number is out of range. If name is a pointer - to an array of at least namelen bytes then the filter - name will be copied into that array. The name will be null - terminated if the namelen is large enough. The - filter name returned will be the name appearing in the file or - else the name registered for the filter or else an empty string. -
    - -

    The flags argument to the functions above is a bit vector of - the following fields: - -

    - - - - - - - - - - -
    Values for flagsDescription
    H5Z_FLAG_OPTIONALIf this bit is set then the filter is optional. If - the filter fails (see below) during an - H5Dwrite() operation then the filter is - just excluded from the pipeline for the chunk for which - it failed; the filter will not participate in the - pipeline during an H5Dread() of the chunk. - This is commonly used for compression filters: if the - compression result would be larger than the input then - the compression filter returns failure and the - uncompressed data is stored in the file. If this bit is - clear and a filter fails then the - H5Dwrite() or H5Dread() also - fails.
    - -

    Defining Filters

    - -

    Each filter is bidirectional, handling both input and output to - the file, and a flag is passed to the filter to indicate the - direction. In either case the filter reads a chunk of data from - a buffer, usually performs some sort of transformation on the - data, places the result in the same or new buffer, and returns - the buffer pointer and size to the caller. If something goes - wrong the filter should return zero to indicate a failure. - -

    During output, a filter that fails or isn't defined and is - marked as optional is silently excluded from the pipeline and - will not be used when reading that chunk of data. A required - filter that fails or isn't defined causes the entire output - operation to fail. During input, any filter that has not been - excluded from the pipeline during output and fails or is not - defined will cause the entire input operation to fail. - -

    Filters are defined in two phases. The first phase is to - define a function to act as the filter and link the function - into the application. The second phase is to register the - function, associating the function with an - H5Z_filter_t identification number and a comment. - -

    -
    typedef size_t (*H5Z_func_t)(unsigned int - flags, size_t cd_nelmts, const unsigned int - cd_values[], size_t nbytes, size_t - *buf_size, void **buf) -
    The flags, cd_nelmts, and - cd_values are the same as for the - H5Pset_filter() function with the additional flag - H5Z_FLAG_REVERSE which is set when the filter is - called as part of the input pipeline. The input buffer is - pointed to by *buf and has a total size of - *buf_size bytes but only nbytes are valid - data. The filter should perform the transformation in place if - possible and return the number of valid bytes or zero for - failure. If the transformation cannot be done in place then - the filter should allocate a new buffer with - malloc() and assign it to *buf, - assigning the allocated size of that buffer to - *buf_size. The old buffer should be freed - by calling free(). - -

    -
    herr_t H5Zregister (H5Z_filter_t filter_id, - const char *comment, H5Z_func_t - filter) -
    The filter function is associated with a filter - number and a short ASCII comment which will be stored in the - hdf5 file if the filter is used as part of a permanent - pipeline during dataset creation. -
    - -

    Predefined Filters

    - -

    If zlib version 1.1.2 or later was found - during configuration then the library will define a filter whose - H5Z_filter_t number is - H5Z_FILTER_DEFLATE. Since this compression method - has the potential for generating compressed data which is larger - than the original, the H5Z_FLAG_OPTIONAL flag - should be turned on so such cases can be handled gracefully by - storing the original data instead of the compressed data. The - cd_nvalues should be one with cd_value[0] - being a compression aggression level between zero and nine, - inclusive (zero is the fastest compression while nine results in - the best compression ratio). - -

    A convenience function for adding the - H5Z_FILTER_DEFLATE filter to a pipeline is: - -

    -
    herr_t H5Pset_deflate (hid_t plist, unsigned - aggression) -
    The deflate compression method is added to the end of the - permanent or transient filter pipeline depending on whether - plist is a dataset creation or dataset transfer - property list. The aggression is a number between - zero and nine (inclusive) to indicate the tradeoff between - speed and compression ratio (zero is fastest, nine is best - ratio). -
    - -

    Even if the zlib isn't detected during - configuration the application can define - H5Z_FILTER_DEFLATE as a permanent filter. If the - filter is marked as optional (as with - H5Pset_deflate()) then it will always fail and be - automatically removed from the pipeline. Applications that read - data will fail only if the data is actually compressed; they - won't fail if H5Z_FILTER_DEFLATE was part of the - permanent output pipeline but was automatically excluded because - it didn't exist when the data was written. - -

    zlib can be acquired from - - https://zlib.net. - -

    Example

    - -

    This example shows how to define and register a simple filter - that adds a checksum capability to the data stream. - -

    The function that acts as the filter always returns zero - (failure) if the md5() function was not detected at - configuration time (left as an exercise for the reader). - Otherwise the function is broken down to an input and output - half. The output half calculates a checksum, increases the size - of the output buffer if necessary, and appends the checksum to - the end of the buffer. The input half calculates the checksum - on the first part of the buffer and compares it to the checksum - already stored at the end of the buffer. If the two differ then - zero (failure) is returned, otherwise the buffer size is reduced - to exclude the checksum. - -

    - - - - -
    -

    
    -                  size_t
    -                  md5_filter(unsigned int flags, size_t cd_nelmts,
    -                  const unsigned int cd_values[], size_t nbytes,
    -                  size_t *buf_size, void **buf)
    -                  {
    -                  #ifdef HAVE_MD5
    -                  unsigned char       cksum[16];
    -
    -                  if (flags & H5Z_REVERSE) {
    -                  /* Input */
    -                  assert(nbytes>=16);
    -                  md5(nbytes-16, *buf, cksum);
    -
    -                  /* Compare */
    -                  if (memcmp(cksum, (char*)(*buf)+nbytes-16, 16)) {
    -                  return 0; /*fail*/
    -                  }
    -
    -                  /* Strip off checksum */
    -                  return nbytes-16;
    -
    -                  } else {
    -                  /* Output */
    -                  md5(nbytes, *buf, cksum);
    -
    -                  /* Increase buffer size if necessary */
    -                  if (nbytes+16>*buf_size) {
    -                  *buf_size = nbytes + 16;
    -                  *buf = realloc(*buf, *buf_size);
    -                  }
    -
    -                  /* Append checksum */
    -                  memcpy((char*)(*buf)+nbytes, cksum, 16);
    -                  return nbytes+16;
    -                  }
    -                  #else
    -                  return 0; /*fail*/
    -                  #endif
    -                  }
    -	          
    -
    - -

    Once the filter function is defined it must be registered so - the HDF5 library knows about it. Since we're testing this - filter we choose one of the H5Z_filter_t numbers - from the reserved range. We'll randomly choose 305. - -

    -

    - - - - -
    -

    
    -                  #define FILTER_MD5 305
    -                  herr_t status = H5Zregister(FILTER_MD5, "md5 checksum", md5_filter);
    -	          
    -
    - -

    Now we can use the filter in a pipeline. We could have added - the filter to the pipeline before defining or registering the - filter as long as the filter was defined and registered by time - we tried to use it (if the filter is marked as optional then we - could have used it without defining it and the library would - have automatically removed it from the pipeline for each chunk - written before the filter was defined and registered). - -

    -

    - - - - -
    -

    
    -                  hid_t dcpl = H5Pcreate(H5P_DATASET_CREATE);
    -                  hsize_t chunk_size[3] = {10,10,10};
    -                  H5Pset_chunk(dcpl, 3, chunk_size);
    -                  H5Pset_filter(dcpl, FILTER_MD5, 0, 0, NULL);
    -                  hid_t dset = H5Dcreate(file, "dset", H5T_NATIVE_DOUBLE, space, dcpl);
    -	          
    -
    - -

    6. Filter Diagnostics

    - -

    If the library is compiled with debugging turned on for the H5Z - layer (usually as a result of configure - --enable-debug=z) then filter statistics are printed when - the application exits normally or the library is closed. The - statistics are written to the standard error stream and include - two lines for each filter that was used: one for input and one - for output. The following fields are displayed: - -

    -

    - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
    Field NameDescription
    MethodThis is the name of the method as defined with - H5Zregister() with the characters - "< or ">" prepended to indicate - input or output.
    TotalThe total number of bytes processed by the filter - including errors. This is the maximum of the - nbytes argument or the return value. -
    ErrorsThis field shows the number of bytes of the Total - column which can be attributed to errors.
    User, System, ElapsedThese are the amount of user time, system time, and - elapsed time in seconds spent in the filter function. - Elapsed time is sensitive to system load. These times - may be zero on operating systems that don't support the - required operations.
    BandwidthThis is the filter bandwidth which is the total - number of bytes processed divided by elapsed time. - Since elapsed time is subject to system load the - bandwidth numbers cannot always be trusted. - Furthermore, the bandwidth includes bytes attributed to - errors which may significantly taint the value if the - function is able to detect errors without much - expense.
    - -

    -

    - - - - - -
    - Example: Filter Statistics -
    -

    H5Z: filter statistics accumulated ov=
    -                  er life of library:
    -                  Method     Total  Errors  User  System  Elapsed Bandwidth
    -                  ------     -----  ------  ----  ------  ------- ---------
    -                  >deflate  160000   40000  0.62    0.74     1.33 117.5 kBs
    -                  <deflate  120000       0  0.11    0.00     0.12 1.000 MBs
    -	          
    -
    - -
    - - -

    Footnote 1: Dataset chunks can be compressed - through the use of filters. Developers should be aware that - reading and rewriting compressed chunked data can result in holes - in an HDF5 file. In time, enough such holes can increase the - file size enough to impair application or library performance - when working with that file. See - - Freespace Management - in the chapter - - Performance Analysis and Issues.

    - diff --git a/doxygen/examples/H5.format.1.0.html b/doxygen/examples/H5.format.1.0.html index 32e377d4323..00da963c48e 100644 --- a/doxygen/examples/H5.format.1.0.html +++ b/doxygen/examples/H5.format.1.0.html @@ -3441,8 +3441,8 @@

    Name: Data Storage - Filter Pipeline

    library. Values 256 through 511 have been set aside for use when developing/testing new filters. The remaining values are allocated to specific filters by contacting the - HDF5 Development - Team. + HDF5 development team. + diff --git a/doxygen/examples/H5.format.1.1.html b/doxygen/examples/H5.format.1.1.html index 707bdc7c281..418afd5ab88 100644 --- a/doxygen/examples/H5.format.1.1.html +++ b/doxygen/examples/H5.format.1.1.html @@ -5558,9 +5558,9 @@

    Name: Data Storage - Filter Pipeline

    1If you are reading an earlier version of this document, this link may have changed. If the link does not work, use the latest version of this document - on The HDF Group’s website, - - https://support.hdfgroup.org/HDF5/doc/H5.format.html; + on The HDF Group’s website, + + H5.format.html; the link there will always be correct. (Return)

    diff --git a/doxygen/examples/H5DS_Spec.pdf b/doxygen/examples/H5DS_Spec.pdf new file mode 100644 index 00000000000..813f4ded3e1 Binary files /dev/null and b/doxygen/examples/H5DS_Spec.pdf differ diff --git a/doxygen/examples/IOFlow.html b/doxygen/examples/IOFlow.html index e890edbb766..b33196d502a 100644 --- a/doxygen/examples/IOFlow.html +++ b/doxygen/examples/IOFlow.html @@ -1,5 +1,4 @@ - HDF5 Raw I/O Flow Notes diff --git a/doxygen/examples/LibraryReleaseVersionNumbers.html b/doxygen/examples/LibraryReleaseVersionNumbers.html index 57b211cd61b..dedbece0c11 100644 --- a/doxygen/examples/LibraryReleaseVersionNumbers.html +++ b/doxygen/examples/LibraryReleaseVersionNumbers.html @@ -241,7 +241,7 @@

    Version Support from the Library<

    For more information on these and other function calls and macros, - see the HDF5 Reference Manual.

    + see the HDF5 Reference Manual.

    Use Cases

    diff --git a/doxygen/examples/intro_SWMR.html b/doxygen/examples/intro_SWMR.html deleted file mode 100644 index b1adb62bdb5..00000000000 --- a/doxygen/examples/intro_SWMR.html +++ /dev/null @@ -1,103 +0,0 @@ - - - Introduction to Single-Writer_Multiple-Reader (SWMR) - -

    Introduction to SWMR

    -

    The Single-Writer / Multiple-Reader (SWMR) feature enables multiple processes to read an HDF5 file while it is being written to (by a single process) without using locks or requiring communication between processes.

    -

    tutr-swmr1.png -

    All communication between processes must be performed via the HDF5 file. The HDF5 file under SWMR access must reside on a system that complies with POSIX write() semantics.

    -

    The basic engineering challenge for this to work was to ensure that the readers of an HDF5 file always see a coherent (though possibly not up to date) HDF5 file.

    -

    The issue is that when writing data there is information in the metadata cache in addition to the physical file on disk:

    -

    tutr-swmr2.png -

    However, the readers can only see the state contained in the physical file:

    -

    tutr-swmr3.png -

    The SWMR solution implements dependencies on when the metadata can be flushed to the file. This ensures that metadata cache flush operations occur in the proper order, so that there will never be internal file pointers in the physical file that point to invalid (unflushed) file addresses.

    -

    A beneficial side effect of using SWMR access is better fault tolerance. It is more difficult to corrupt a file when using SWMR.

    -

    Documentation

    -

    SWMR User's Guide

    -

    HDF5 Library APIs

    -
      -
    • H5F_START_SWMR_WRITE — Enables SWMR writing mode for a file
    • -
    • H5DO_APPEND — Appends data to a dataset along a specified dimension
    • -
    • H5P_SET_OBJECT_FLUSH_CB — Sets a callback function to invoke when an object flush occurs in the file
    • -
    • H5P_GET_OBJECT_FLUSH_CB — Retrieves the object flush property values from the file access property list
    • -
    • H5O_DISABLE_MDC_FLUSHES — Prevents metadata entries for an HDF5 object from being flushed from the metadata cache to storage
    • -
    • H5O_ENABLE_MDC_FLUSHES — Enables flushing of dirty metadata entries from a file’s metadata cache
    • -
    • H5O_ARE_MDC_FLUSHES_DISABLED — Determines if an HDF5 object has had flushes of metadata entries disabled
    • -
    -

    Tools

    -
      -
    • h5watch — Outputs new records appended to a dataset as the dataset grows
    • -
    • h5format_convert — Converts the layout format version and chunked indexing types of datasets created with HDF5-1.10 so that applications built with HDF5-1.8 can access them
    • -
    • h5clear — Clears superblock status_flags field, removes metadata cache image, prints EOA and EOF, or sets EOA of a file
    • -
    -

    Design Documents

    -

    Error while fetching page properties report data:

    -

    Programming Model

    -

    Please be aware that the SWMR feature requires that an HDF5 file be created with the latest file format. See H5P_SET_LIBVER_BOUNDS for more information.

    -

    To use SWMR, follow the general programming model for creating and accessing HDF5 files and objects along with the steps described below.

    -

    SWMR Writer:

    -

    The SWMR writer either opens an existing file and objects or creates them as follows.

    -

    Open an existing file:

    -

    Call H5Fopen using the H5F_ACC_SWMR_WRITE flag. -Begin writing datasets. -Periodically flush data. -Create a new file:

    -

    Call H5Fcreate using the latest file format. -Create groups, datasets and attributes, and then close the attributes. -Call H5F_START_SWMR_WRITE to start SWMR access to the file. -Periodically flush data.

    -

    Example Code:

    -

    Create the file using the latest file format property:

    -

    - fapl = H5Pcreate (H5P_FILE_ACCESS); - status = H5Pset_libver_bounds (fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST); - fid = H5Fcreate (filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl); -[Create objects (files, datasets, ...). Close any attributes and named datatype objects. Groups and datasets may remain open before starting SWMR access to them.]

    -

    Start SWMR access to the file:

    -

    status = H5Fstart_swmr_write (fid); -Reopen the datasets and start writing, periodically flushing data:

    -

    status = H5Dwrite (dset_id, ...); - status = H5Dflush (dset_id);

    -

    SWMR Reader:

    -

    The SWMR reader must continually poll for new data:

    -

    Call H5Fopen using the H5F_ACC_SWMR_READ flag. -Poll, checking the size of the dataset to see if there is new data available for reading. -Read new data, if any.

    -

    Example Code:

    -

    Open the file using the SWMR read flag:

    -

    fid = H5Fopen (filename, H5F_ACC_RDONLY | H5F_ACC_SWMR_READ, H5P_DEFAULT); -Open the dataset and then repeatedly poll the dataset, by getting the dimensions, reading new data, and refreshing:

    -

    dset_id = H5Dopen (...); - space_id = H5Dget_space (...); - while (...) { - status = H5Dread (dset_id, ...); - status = H5Drefresh (dset_id); - space_id = H5Dget_space (...); - }

    -

    Limitations and Scope

    -

    An HDF5 file under SWMR access must reside on a system that complies with POSIX write() semantics. It is also limited in scope as follows:

    -

    The writer process is only allowed to modify raw data of existing datasets by:

    -

    Appending data along any unlimited dimension. -Modifying existing data. -The following operations are not allowed (and the corresponding HDF5 calls will fail):

    -

    The writer cannot add new objects to the file. -The writer cannot delete objects in the file. -The writer cannot modify or append data with variable length, string or region reference datatypes. -File space recycling is not allowed. As a result the size of a file modified by a SWMR writer may be larger than a file modified by a non-SWMR writer.

    -

    Tools for Working with SWMR

    -

    Two new tools, h5watch and h5clear, are available for use with SWMR. The other HDF5 utilities have also been modified to recognize SWMR:

    -

    The h5watch tool allows a user to monitor the growth of a dataset. -The h5clear tool clears the status flags in the superblock of an HDF5 file. -The rest of the HDF5 tools will exit gracefully but not work with SWMR otherwise.

    -

    Programming Example

    -

    A good example of using SWMR is included with the HDF5 tests in the source code. You can run it while reading the file it creates. If you then interrupt the application and reader and look at the resulting file, you will see that the file is still valid. Follow these steps:

    -

    Download the HDF5-1.10 source code to a local directory on a filesystem (that complies with POSIX write() semantics). Build the software. No special configuration options are needed to use SWMR.

    -

    Invoke two command terminal windows. In one window go into the bin/ directory of the built binaries. In the other window go into the test/ directory of the HDF5-1.10 source code that was just built.

    -

    In the window in the test/ directory compile and run use_append_chunk.c. The example writes a three dimensional dataset by planes (with chunks of size 1 x 256 x 256).

    -

    In the other window (in the bin/ directory) run h5watch on the file created by use_append_chunk.c (use_append_chunk.h5). It should be run while use_append_chunk is executing and you will see valid data displayed with h5watch.

    -

    Interrupt use_append_chunk while it is running, and stop h5watch.

    -

    Use h5clear to clear the status flags in the superblock of the HDF5 file (use_append_chunk.h5).

    -

    View the file with h5dump. You will see that it is a valid file even though the application did not close properly. It will contain data up to the point that it was interrupted.

    - - diff --git a/doxygen/examples/intro_VDS.html b/doxygen/examples/intro_VDS.html deleted file mode 100644 index 6e573b9b75c..00000000000 --- a/doxygen/examples/intro_VDS.html +++ /dev/null @@ -1,72 +0,0 @@ - - - Introduction to the Virtual Dataset - VDS - -

    The HDF5 Virtual Dataset (VDS) feature enables users to access data in a collection of HDF5 files as a single HDF5 dataset and to use the HDF5 APIs to work with that dataset.

    -

    For example, your data may be collected into four files:

    - -

    tutrvds-multimgs.png - -

    You can map the datasets in the four files into a single VDS that can be accessed just like any other dataset:

    - -

    tutrvds-snglimg.png - -

    The mapping between a VDS and the HDF5 source datasets is persistent and transparent to an application. If a source file is missing the fill value will be displayed.

    -

    See the Virtual Dataset (VDS) Documentation for complete details regarding the VDS feature.

    -

    The VDS feature was implemented using hyperslab selection (H5S_SELECT_HYPERSLAB). See the tutorial on Reading From or Writing to a Subset of a Dataset for more information on selecting hyperslabs.

    -

    Programming Model -To create a Virtual Dataset you simply follow the HDF5 programming model and add a few additional API calls to map the source datasets to the VDS.

    -

    Following are the steps for creating a Virtual Dataset:

    -

    Create the source datasets that will comprise the VDS -Create the VDS: ‐ Define a datatype and dataspace (can be unlimited) -‐ Define the dataset creation property list (including fill value) -‐ (Repeat for each source dataset) Map elements from the source dataset to elements of the VDS: -Select elements in the source dataset (source selection) -Select elements in the virtual dataset (destination selection) -Map destination selections to source selections (see Functions for Working with a VDS)

    -

    ‐ Call H5Dcreate using the properties defined above -Access the VDS as a regular HDF5 dataset -Close the VDS when finished

    -

    Functions for Working with a VDS -The H5P_SET_VIRTUAL API sets the mapping between virtual and source datasets. This is a dataset creation property list. Using this API will change the layout of the dataset to H5D_VIRTUAL. As with specifying any dataset creation property list, an instance of the property list is created, modified, passed into the dataset creation call and then closed:

    -

    dcpl = H5Pcreate (H5P_DATASET_CREATE);

    -

    src_space = H5screate_simple ... - status = H5Sselect_hyperslab (space, ... - status = H5Pset_virtual (dcpl, space, SRC_FILE[i], SRC_DATASET[i], src_space);

    -

    dset = H5Dcreate2 (file, DATASET, H5T_NATIVE_INT, space, H5P_DEFAULT, dcpl, H5P_DEFAULT);

    -

    status = H5Pclose (dcpl); -There are several other APIs introduced with Virtual Datasets, including query functions. For details see the complete list of HDF5 library APIs that support Virtual Datasets

    -

    Limitations -This feature requires HDF5-1.10. -The number of source datasets is unlimited. However, there is a limit on the size of each source dataset.

    -

    Programming Examples -Example 1 -This example creates three HDF5 files, each with a one-dimensional dataset of 6 elements. The datasets in these files are the source datasets that are then used to create a 4 x 6 Virtual Dataset with a fill value of -1. The first three rows of the VDS are mapped to the data from the three source datasets as shown below:

    -

    tutrvds-ex.png

    -

    In this example the three source datasets are mapped to the VDS with this code:

    -
    src_space = H5Screate_simple (RANK1, dims, NULL);
    -for (i = 0; i < 3; i++) {
    -    start[0] = (hsize_t)i;
    -    /* Select i-th row in the virtual dataset; selection in the source datasets is the same. */
    -    status = H5Sselect_hyperslab (space, H5S_SELECT_SET, start, NULL, count, block);
    -    status = H5Pset_virtual (dcpl, space, SRC_FILE[i], SRC_DATASET[i], src_space);
    -}
    -
    -

    After the VDS is created and closed, it is reopened. The property list is then queried to determine the layout of the dataset and its mappings, and the data in the VDS is read and printed.

    -

    This example is in the HDF5 source code and can be obtained from here:

    -

    C Example

    -

    For details on compiling an HDF5 application: [ Compiling HDF5 Applications ]

    -

    Example 2 -This example shows how to use a C-style printf statement for specifying multiple source datasets as one virtual dataset. Only one mapping is required. In other words only one H5P_SET_VIRTUAL call is needed to map multiple datasets. It creates a 2-dimensional unlimited VDS. Then it re-opens the file, makes queries, and reads the virtual dataset.

    -

    The source datasets are specified as A-0, A-1, A-2, and A-3. These are mapped to the virtual dataset with one call:

    -
    status = H5Pset_virtual (dcpl, vspace, SRCFILE, "/A-%b", src_space);
    -
    -

    The %b indicates that the block count of the selection in the dimension should be used.

    -

    C Example

    -

    For details on compiling an HDF5 application: [ Compiling HDF5 Applications ]

    -

    Using h5dump with a VDS -The h5dump utility can be used to view a VDS. The h5dump output for a VDS looks exactly like that for any other dataset. If h5dump cannot find a source dataset then the fill value will be displayed.

    -

    You can determine that a dataset is a VDS by looking at its properties with h5dump -p. It will display each source dataset mapping, beginning with Mapping 0. Below is an excerpt of the output of h5dump -p on the vds.h5 file created in Example 1. You can see that the entire source file a.h5 is mapped to the first row of the /VDS dataset:

    - -

    tutrvds-map.png

    - diff --git a/doxygen/examples/tables/propertyLists.dox b/doxygen/examples/tables/propertyLists.dox index 340e13c26a5..76727b58a59 100644 --- a/doxygen/examples/tables/propertyLists.dox +++ b/doxygen/examples/tables/propertyLists.dox @@ -490,12 +490,12 @@ and one raw data file. #H5Pget_filter Returns information about a filter in a pipeline. -The C function is a macro: \see \ref api-compat-macros. +The C function is a macro: @see @ref api-compat-macros. #H5Pget_filter_by_id Returns information about the specified filter. -The C function is a macro: \see \ref api-compat-macros. +The C function is a macro: @see @ref api-compat-macros. #H5Pmodify_filter @@ -739,12 +739,12 @@ of the library for reading or writing the actual data. #H5Pget_filter Returns information about a filter in a pipeline. The -C function is a macro: \see \ref api-compat-macros. +C function is a macro: @see @ref api-compat-macros. #H5Pget_filter_by_id Returns information about the specified filter. The -C function is a macro: \see \ref api-compat-macros. +C function is a macro: @see @ref api-compat-macros. 
#H5Pget_nfilters diff --git a/doxygen/hdf5doxy_layout.xml b/doxygen/hdf5doxy_layout.xml index d895b2dd5bd..20e951856c7 100644 --- a/doxygen/hdf5doxy_layout.xml +++ b/doxygen/hdf5doxy_layout.xml @@ -5,12 +5,12 @@ - + + --> diff --git a/hl/fortran/test/Makefile.am b/hl/fortran/test/Makefile.am index a74f8ef0fbf..adaa59db4cc 100644 --- a/hl/fortran/test/Makefile.am +++ b/hl/fortran/test/Makefile.am @@ -15,7 +15,9 @@ ## # # HDF5 High-Level Fortran Makefile(.in) - +# +# Autoconf cannot figure out dependencies between modules; disable parallel make +.NOTPARALLEL: include $(top_srcdir)/config/commence.am AM_CPPFLAGS+=-I$(top_srcdir)/src -I$(top_builddir)/src -I$(top_srcdir)/hl/src diff --git a/hl/src/H5DOpublic.h b/hl/src/H5DOpublic.h index 661ca7a2abe..09a8f64829f 100644 --- a/hl/src/H5DOpublic.h +++ b/hl/src/H5DOpublic.h @@ -161,7 +161,7 @@ H5_HLDLL herr_t H5DOappend(hid_t dset_id, hid_t dxpl_id, unsigned axis, size_t e * from one datatype to another, and the filter pipeline to write the chunk. * Developers should have experience with these processes before * using this function. Please see - * + * * Using the Direct Chunk Write Function * for more information. * diff --git a/hl/src/H5DSpublic.h b/hl/src/H5DSpublic.h index 4afe51180f9..6a08be8e5c2 100644 --- a/hl/src/H5DSpublic.h +++ b/hl/src/H5DSpublic.h @@ -117,7 +117,7 @@ H5_HLDLL herr_t H5DSwith_new_ref(hid_t obj_id, hbool_t *with_new_ref); * * Entries are created in the #DIMENSION_LIST and * #REFERENCE_LIST attributes, as defined in section 4.2 of - * + * * HDF5 Dimension Scale Specification. * * Fails if: @@ -147,7 +147,7 @@ H5_HLDLL herr_t H5DSattach_scale(hid_t did, hid_t dsid, unsigned int idx); * dimension \p idx of dataset \p did. This deletes the entries in the * #DIMENSION_LIST and #REFERENCE_LIST attributes, * as defined in section 4.2 of - * + * * HDF5 Dimension Scale Specification. 
* * Fails if: @@ -180,7 +180,7 @@ H5_HLDLL herr_t H5DSdetach_scale(hid_t did, hid_t dsid, unsigned int idx); * as defined above. Creates the CLASS attribute, set to the value * "DIMENSION_SCALE" and an empty #REFERENCE_LIST attribute, * as described in - * + * * HDF5 Dimension Scale Specification. * (PDF, see section 4.2). * diff --git a/hl/src/H5LTpublic.h b/hl/src/H5LTpublic.h index 18f7502209f..514fe244e10 100644 --- a/hl/src/H5LTpublic.h +++ b/hl/src/H5LTpublic.h @@ -1386,8 +1386,8 @@ H5_HLDLL herr_t H5LTget_attribute_info(hid_t loc_id, const char *obj_name, const * \p lang_type definition of HDF5 datatypes. * Currently, only the DDL(#H5LT_DDL) is supported. * The complete DDL definition of HDF5 datatypes can be found in - * the last chapter of the - * + * the specifications chapter of the + * * HDF5 User's Guide. * * \par Example @@ -1424,8 +1424,8 @@ H5_HLDLL hid_t H5LTtext_to_dtype(const char *text, H5LT_lang_t lang_type); * * Currently only DDL (#H5LT_DDL) is supported for \p lang_type. * The complete DDL definition of HDF5 data types can be found in - * the last chapter of the - * + * the specifications chapter of the + * * HDF5 User's Guide. * * \par Example @@ -1625,7 +1625,7 @@ H5_HLDLL htri_t H5LTpath_valid(hid_t loc_id, const char *path, hbool_t check_obj * \note **Recommended Reading:** * \note This function is part of the file image operations feature set. * It is highly recommended to study the guide - * + * * HDF5 File Image Operations before using this feature set.\n * See the “See Also” section below for links to other elements of * HDF5 file image operations. diff --git a/hl/tools/gif2h5/decompress.c b/hl/tools/gif2h5/decompress.c index e87a60cf7af..62a22922ff4 100644 --- a/hl/tools/gif2h5/decompress.c +++ b/hl/tools/gif2h5/decompress.c @@ -296,6 +296,10 @@ Decompress(GIFIMAGEDESC *GifImageDesc, GIFHEAD *GifHead) * Build the hash table on-the-fly. No table is stored in the * file. 
*/ + if (FreeCode >= 4096) { + printf("Error: FreeCode out of bounds\n"); + exit(EXIT_FAILURE); + } Prefix[FreeCode] = OldCode; Suffix[FreeCode] = FinChar; OldCode = InCode; diff --git a/java/src/hdf/overview.html b/java/src/hdf/overview.html index 84e945b2f87..8329277cda7 100644 --- a/java/src/hdf/overview.html +++ b/java/src/hdf/overview.html @@ -91,6 +91,6 @@

    and the HDF5 library.

    To Obtain

    -The JHI5 is included with the HDF5 library. +The JHI5 is included with the HDF5 library. diff --git a/java/src/jni/exceptionImp.c b/java/src/jni/exceptionImp.c index 4cf03ac9f28..6b2004ddeb4 100644 --- a/java/src/jni/exceptionImp.c +++ b/java/src/jni/exceptionImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5Constants.c b/java/src/jni/h5Constants.c index 3a1cfae9798..8f932b7ba54 100644 --- a/java/src/jni/h5Constants.c +++ b/java/src/jni/h5Constants.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5Imp.c b/java/src/jni/h5Imp.c index 898b52ad3ed..6092419c256 100644 --- a/java/src/jni/h5Imp.c +++ b/java/src/jni/h5Imp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5aImp.c b/java/src/jni/h5aImp.c index 54c862eff6c..b6ed1c4c3e1 100644 --- a/java/src/jni/h5aImp.c +++ b/java/src/jni/h5aImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. 
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5dImp.c b/java/src/jni/h5dImp.c index f6318b222d4..363936b76e9 100644 --- a/java/src/jni/h5dImp.c +++ b/java/src/jni/h5dImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5eImp.c b/java/src/jni/h5eImp.c index d52a4f72cd0..89c9362626f 100644 --- a/java/src/jni/h5eImp.c +++ b/java/src/jni/h5eImp.c @@ -21,9 +21,6 @@ extern "C" { * Each routine wraps a single HDF entry point, generally with the * analogous arguments and return codes. * - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * */ #include diff --git a/java/src/jni/h5fImp.c b/java/src/jni/h5fImp.c index 9295383ef4d..6bd17a786cb 100644 --- a/java/src/jni/h5fImp.c +++ b/java/src/jni/h5fImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5gImp.c b/java/src/jni/h5gImp.c index fce68022649..54b72b6c09a 100644 --- a/java/src/jni/h5gImp.c +++ b/java/src/jni/h5gImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. 
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5iImp.c b/java/src/jni/h5iImp.c index de70e1e424f..728c3b14ed5 100644 --- a/java/src/jni/h5iImp.c +++ b/java/src/jni/h5iImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5jni.h b/java/src/jni/h5jni.h index ad867083ba9..b1bd968ba7c 100644 --- a/java/src/jni/h5jni.h +++ b/java/src/jni/h5jni.h @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #include #include "H5version.h" #include diff --git a/java/src/jni/h5lImp.c b/java/src/jni/h5lImp.c index 0d9ac7dfc01..7d487999f96 100644 --- a/java/src/jni/h5lImp.c +++ b/java/src/jni/h5lImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5oImp.c b/java/src/jni/h5oImp.c index 15daeafde6b..60a6e4fbf90 100644 --- a/java/src/jni/h5oImp.c +++ b/java/src/jni/h5oImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. 
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5pACPLImp.c b/java/src/jni/h5pACPLImp.c index 4635fa7373b..7c9895a6de1 100644 --- a/java/src/jni/h5pACPLImp.c +++ b/java/src/jni/h5pACPLImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5pDAPLImp.c b/java/src/jni/h5pDAPLImp.c index 01c3983c2cc..44378a1dc5e 100644 --- a/java/src/jni/h5pDAPLImp.c +++ b/java/src/jni/h5pDAPLImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5pDCPLImp.c b/java/src/jni/h5pDCPLImp.c index bd44f447bd1..dbc57d0120a 100644 --- a/java/src/jni/h5pDCPLImp.c +++ b/java/src/jni/h5pDCPLImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5pDXPLImp.c b/java/src/jni/h5pDXPLImp.c index 31f6d02b860..3b519ef2709 100644 --- a/java/src/jni/h5pDXPLImp.c +++ b/java/src/jni/h5pDXPLImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. 
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5pFAPLImp.c b/java/src/jni/h5pFAPLImp.c index af56336fb55..24b7f357e50 100644 --- a/java/src/jni/h5pFAPLImp.c +++ b/java/src/jni/h5pFAPLImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5pFCPLImp.c b/java/src/jni/h5pFCPLImp.c index 7c1b44add5f..56b4e921aae 100644 --- a/java/src/jni/h5pFCPLImp.c +++ b/java/src/jni/h5pFCPLImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5pGAPLImp.c b/java/src/jni/h5pGAPLImp.c index 0ee65710ac5..b38bd4b3b23 100644 --- a/java/src/jni/h5pGAPLImp.c +++ b/java/src/jni/h5pGAPLImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5pGCPLImp.c b/java/src/jni/h5pGCPLImp.c index 49d79dc2366..b71558012ce 100644 --- a/java/src/jni/h5pGCPLImp.c +++ b/java/src/jni/h5pGCPLImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. 
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5pImp.c b/java/src/jni/h5pImp.c index c952ccb9dff..6c17984ae24 100644 --- a/java/src/jni/h5pImp.c +++ b/java/src/jni/h5pImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5pLAPLImp.c b/java/src/jni/h5pLAPLImp.c index 3048c155413..36813e33fc9 100644 --- a/java/src/jni/h5pLAPLImp.c +++ b/java/src/jni/h5pLAPLImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5pLCPLImp.c b/java/src/jni/h5pLCPLImp.c index ecabadd29bc..e27a9eb1570 100644 --- a/java/src/jni/h5pLCPLImp.c +++ b/java/src/jni/h5pLCPLImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5pOCPLImp.c b/java/src/jni/h5pOCPLImp.c index 7cd9b5c721f..a743cbaa7f4 100644 --- a/java/src/jni/h5pOCPLImp.c +++ b/java/src/jni/h5pOCPLImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. 
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5pOCpyPLImp.c b/java/src/jni/h5pOCpyPLImp.c index c4d2ed7fd14..a78aaa259f0 100644 --- a/java/src/jni/h5pOCpyPLImp.c +++ b/java/src/jni/h5pOCpyPLImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5pStrCPLImp.c b/java/src/jni/h5pStrCPLImp.c index 0045efa342e..3382f0aea30 100644 --- a/java/src/jni/h5pStrCPLImp.c +++ b/java/src/jni/h5pStrCPLImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5plImp.c b/java/src/jni/h5plImp.c index 3c87fd52a99..9632e9e2609 100644 --- a/java/src/jni/h5plImp.c +++ b/java/src/jni/h5plImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5rImp.c b/java/src/jni/h5rImp.c index f97f803f90e..4ccad5457a2 100644 --- a/java/src/jni/h5rImp.c +++ b/java/src/jni/h5rImp.c @@ -10,11 +10,6 @@ * help@hdfgroup.org. 
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5sImp.c b/java/src/jni/h5sImp.c index 55fb268434f..738db67ffee 100644 --- a/java/src/jni/h5sImp.c +++ b/java/src/jni/h5sImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5tImp.c b/java/src/jni/h5tImp.c index 309454b16e4..316455715ac 100644 --- a/java/src/jni/h5tImp.c +++ b/java/src/jni/h5tImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5util.c b/java/src/jni/h5util.c index 9c441729a39..fb619aa619d 100644 --- a/java/src/jni/h5util.c +++ b/java/src/jni/h5util.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5util.h b/java/src/jni/h5util.h index 5af96afaee9..011aaec428f 100644 --- a/java/src/jni/h5util.h +++ b/java/src/jni/h5util.h @@ -10,12 +10,6 @@ * help@hdfgroup.org. 
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifndef H5UTIL_H__ #define H5UTIL_H__ diff --git a/java/src/jni/h5vlImp.c b/java/src/jni/h5vlImp.c index 2bf0b8d6b0a..47e532a5609 100644 --- a/java/src/jni/h5vlImp.c +++ b/java/src/jni/h5vlImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/h5zImp.c b/java/src/jni/h5zImp.c index e6d37bfa3af..9c387fa33ee 100644 --- a/java/src/jni/h5zImp.c +++ b/java/src/jni/h5zImp.c @@ -10,12 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ - #ifdef __cplusplus extern "C" { #endif /* __cplusplus */ diff --git a/java/src/jni/nativeData.c b/java/src/jni/nativeData.c index d25951ff436..d014b64579d 100644 --- a/java/src/jni/nativeData.c +++ b/java/src/jni/nativeData.c @@ -10,11 +10,6 @@ * help@hdfgroup.org. * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */ -/* - * For details of the HDF libraries, see the HDF Documentation at: - * https://portal.hdfgroup.org/documentation/index.html - * - */ /* * This module contains the implementation of all the native methods * used for number conversion. This is represented by the Java diff --git a/release_docs/INSTALL b/release_docs/INSTALL index 9373192912b..63f2115fdd6 100644 --- a/release_docs/INSTALL +++ b/release_docs/INSTALL @@ -49,7 +49,7 @@ CONTENTS include the Szip library with the encoder enabled. 
These can be found here: - https://www.hdfgroup.org/downloads/hdf5/ + https://support.hdfgroup.org/downloads/HDF5 Please notice that if HDF5 configure cannot find a valid Szip library, configure will not fail; in this case, the compression filter will diff --git a/release_docs/INSTALL_Autotools.txt b/release_docs/INSTALL_Autotools.txt index d0ad3b6d69b..0dd6f192308 100644 --- a/release_docs/INSTALL_Autotools.txt +++ b/release_docs/INSTALL_Autotools.txt @@ -334,7 +334,7 @@ III. Full installation instructions for source distributions (or '--with-pthread=DIR') flag to the configure script. For further information, see: - https://portal.hdfgroup.org/display/knowledge/Questions+about+thread-safety+and+concurrent+access + https://support.hdfgroup.org/documentation/HDF5/Questions+about+thread-safety+and+concurrent+access The high-level, C++, Fortran and Java interfaces are not compatible with the thread-safety option because the lock is not hoisted @@ -490,7 +490,7 @@ IV. Using the Library For information on using HDF5 see the documentation, tutorials and examples found here: - https://portal.hdfgroup.org/documentation/index.html + https://support.hdfgroup.org/documentation/HDF5/index.html A summary of the features included in the built HDF5 installation can be found in the libhdf5.settings file in the same directory as the static and/or diff --git a/release_docs/INSTALL_parallel b/release_docs/INSTALL_parallel index 9eb486f79d2..df255c6e0ad 100644 --- a/release_docs/INSTALL_parallel +++ b/release_docs/INSTALL_parallel @@ -90,7 +90,7 @@ nodes. They would probably work for other Cray systems but have not been verified. Obtain the HDF5 source code: - https://portal.hdfgroup.org/display/support/Downloads + https://support.hdfgroup.org/downloads/HDF5 The entire build process should be done on a MOM node in an interactive allocation and on a file system accessible by all compute nodes. 
Request an interactive allocation with qsub: diff --git a/release_docs/RELEASE.txt b/release_docs/RELEASE.txt index 067b0e6cf5c..18b75e3530c 100644 --- a/release_docs/RELEASE.txt +++ b/release_docs/RELEASE.txt @@ -15,16 +15,16 @@ final release. Links to HDF5 documentation can be found on: - https://portal.hdfgroup.org/documentation/ + https://support.hdfgroup.org/documentation/HDF5 The official HDF5 releases can be obtained from: - https://www.hdfgroup.org/downloads/hdf5/ + https://support.hdfgroup.org/downloads/HDF5/ Changes from release to release and new features in the HDF5-1.14.x release series can be found at: - https://portal.hdfgroup.org/documentation/hdf5-docs/release_specific_info.html + https://support.hdfgroup.org/documentation/HDF5/release_specific_info.html If you have any questions or comments, please send them to the HDF Help Desk: diff --git a/release_docs/RELEASE_PROCESS.md b/release_docs/RELEASE_PROCESS.md index 0bd8d39307f..f155be5d6fe 100644 --- a/release_docs/RELEASE_PROCESS.md +++ b/release_docs/RELEASE_PROCESS.md @@ -18,7 +18,7 @@ Maintenance releases are always forward compatible with regards to the HDF5 file - HDF5 libraries and command line utilities can access files created by future maintenance versions of the library. Note that maintenance releases are NOT guaranteed to be interface-compatible, meaning that, on occasion, application source code will need updated and re-compiled against a new maintenance release when the interface changes. Interface changes are only made when absolutely necessary as deemed by the HDF5 product manager(s), and interface compatibility reports are published with each release to inform customers and users of any incompatibilities in the interface. -For more information on the HDF5 versioning and backward and forward compatibility issues, see the [API Compatibility Macros](https://hdfgroup.github.io/hdf5/develop/api-compat-macros.html) on the public website. 
+For more information on the HDF5 versioning and backward and forward compatibility issues, see the [API Compatibility Macros][u13] on the public website. ## Participants: - Product Manager — The individual responsible for the overall direction and development of a software product at The HDF Group. @@ -35,21 +35,21 @@ For more information on the HDF5 versioning and backward and forward compatibili ### 3. Prepare Release Notes (Release Manager) 1. Confirm that all non-trivial changes made to the source are reflected in the release notes. Verify the following: - [HDF5 Milestones Projects](https://github.com/HDFGroup/hdf5/milestones) - - Each entry in [RELEASE.txt](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/release_docs/RELEASE.txt) traces to one or more resolved GH issues marked with FixVersion="X.Y.Z". - - Each resolved GH milestone issue traces to an entry in [RELEASE.txt](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/release_docs/RELEASE.txt). + - Each entry in [RELEASE.txt][u1] traces to one or more resolved GH issues marked with FixVersion="X.Y.Z". + - Each resolved GH milestone issue traces to an entry in [RELEASE.txt][u1]. - Each resolved GH milestone issue traces to one or more revisions to the HDF5 source. - Each resolved GH milestone issue traces to one or more pull requests. -2. For each previously authored KNOWN ISSUE in the [RELEASE.txt](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/release_docs/RELEASE.txt), if the issue has been resolved or can no longer be confirmed, remove the issue from the [RELEASE.txt](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/release_docs/RELEASE.txt). +2. For each previously authored KNOWN ISSUE in the [RELEASE.txt][u1], if the issue has been resolved or can no longer be confirmed, remove the issue from the [RELEASE.txt][u1]. - Document any new known issues at the top of the list. -3. 
Update the TESTED CONFIGURATION FEATURES SUMMARY in [RELEASE.txt](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/release_docs/RELEASE.txt) to correspond to features and options that have been tested during the maintenance period by the automated daily regression tests. +3. Update the TESTED CONFIGURATION FEATURES SUMMARY in [RELEASE.txt][u1] to correspond to features and options that have been tested during the maintenance period by the automated daily regression tests. - **See: Testing/Testing Systems(this is a page in confluence)** -4. Update current compiler information for each platform in the PLATFORMS TESTED section of [RELEASE.txt](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/release_docs/RELEASE.txt). -5. Review the [RELEASE.txt](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/release_docs/RELEASE.txt) for formatting and language to verify that it corresponds to guidelines found in **[Writing Notes in a RELEASE.txt(this is missing)]()** File. -6. Review and update, if needed, the [README](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/README.md) and [COPYING](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/COPYING) files. -7. Review and update all INSTALL_* files in [release_docs](https://github.com/HDFGroup/hdf5/tree/hdf5_1_14/release_docs), if needed. - - [INSTALL](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/release_docs/INSTALL) should be general info and not require extensive changes - - [INSTALL_Autotools.txt](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/release_docs/INSTALL_Autotools.txt) are the instructions for building under autotools. - - [INSTALL_CMake.txt](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/release_docs/INSTALL_CMake.txt) are the instructions for building under CMake. +4. Update current compiler information for each platform in the PLATFORMS TESTED section of [RELEASE.txt][u1]. +5. 
Review the [RELEASE.txt][u1] for formatting and language to verify that it corresponds to guidelines found in **[Writing Notes in a RELEASE.txt(this is missing)]()** File. +6. Review and update, if needed, the [README][u2] and [COPYING][u3] files. +7. Review and update all INSTALL_* files in [release_docs][u4], if needed. + - [INSTALL][u5] should be general info and not require extensive changes + - [INSTALL_Autotools.txt][u6] are the instructions for building under autotools. + - [INSTALL_CMake.txt][u7] are the instructions for building under CMake. ### 4. Freeze Code (Release Manager | Test Automation Team) 1. Transition from performing maintenance on software to preparing for its delivery. @@ -62,14 +62,14 @@ For more information on the HDF5 versioning and backward and forward compatibili ### 5. Update Interface Version (Release Manager | Product Manager) 1. Verify interface additions, changes, and removals, and update the shared library interface version number. 2. Execute the CI snapshot workflow. - - Actions - “[hdf5 release build](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/.github/workflows/release.yml)” workflow and use the defaults. + - Actions - "[hdf5 release build][u8]" workflow and use the defaults. 3. Download and inspect release build source and binary files. Downloaded source files should build correctly, one or more binaries should install and run correctly. There should be nothing missing nor any extraneous files that aren’t meant for release. -4. Verify the interface compatibility reports between the current source and the previous release on the Github [Snapshots](https://github.com/HDFGroup/hdf5/releases/tag/snapshot-1.14) page. - - The compatibility reports are produced by the CI and are viewable in the Github [Releases/snapshot](https://github.com/HDFGroup/hdf5/releases/tag/snapshot) section. -5. 
Verify the interface compatibility reports between the current source and the previous release on the Github [Snapshots](https://github.com/HDFGroup/hdf5/releases/tag/snapshot-1.14) page. - - The compatibility reports are produced by the CI and are viewable in the Github [Releases/snapshot](https://github.com/HDFGroup/hdf5/releases/tag/snapshot) section. +4. Verify the interface compatibility reports between the current source and the previous release on the Github [Snapshots][u14] page. + - The compatibility reports are produced by the CI and are viewable in the Github [Releases/snapshot][u15] section. +5. Verify the interface compatibility reports between the current source and the previous release on the Github [Snapshots][u14] page. + - The compatibility reports are produced by the CI and are viewable in the Github [Releases/snapshot][u15] section. 6. Confirm the necessity of and approve of any interface-breaking changes. If any changes need to be reverted, task the developer who made the change to do so as soon as possible. If a change is reverted, return to the previous step and regenerate the compatibility report after the changes is made. Otherwise, continue to the next step. -7. Update the .so version numbers in the [config/lt_vers.am](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/config/lt_vers.am) file in the support branch according to [libtool's library interface version](https://www.gnu.org/software/libtool/manual/libtool.html#Versioning) scheme. +7. Update the .so version numbers in the [config/lt_vers.am][u9] file in the support branch according to [libtool's library interface version](https://www.gnu.org/software/libtool/manual/libtool.html#Versioning) scheme. - See [Updating version info (Libtool)](https://www.gnu.org/software/libtool/manual/html_node/Updating-version-info.html#Updating-version-info) for rules to help update library version numbers. 8.
After the release branch has been created, run `./autogen.sh` to regenerate build system files on the release branch and commit the changes. @@ -83,21 +83,21 @@ For more information on the HDF5 versioning and backward and forward compatibili - or create the new branch in GitHub GUI. 4. Check that required CMake files point to the specific versions of the third-party software (szip, zlib and plugins) that they depend on. - Update as needed. -5. Change the **support** branch to X.Y.{Z+1}-1 using the [bin/h5vers](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/bin/h5vers) script: +5. Change the **support** branch to X.Y.{Z+1}-1 using the [bin/h5vers][u10] script: - `$ git checkout hdf5_X_Y` - `$ bin/h5vers -s X.Y.{Z+1}-1;` - `$ git commit -m "Updated support branch version number to X.Y.{Z+1}-1"` - `$ git push` -6. Change the **release preparation branch**'s version number to X.Y.Z-{SR+1} using the [bin/h5vers](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/bin/h5vers) script: +6. Change the **release preparation branch**'s version number to X.Y.Z-{SR+1} using the [bin/h5vers][u10] script: - `$ git checkout hdf5_X_Y_Z;` - `$ bin/h5vers -s X.Y.Z-{SR+1};` - `$ git commit -m "Updated release preparation branch version number to X.Y.Z-{SR+1}"` - `$ git push` 7. Update default configuration mode - `$ git checkout hdf5_X_Y_Z;` and `$ bin/switch_maint_mode -disable ./configure.ac` to disable `AM_MAINTAINER_MODE`. - - Need to set option `HDF5_GENERATE_HEADERS` to `OFF`, currently in line 996 of [src/CMakeLists.txt](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/src/CMakeLists.txt). - - Change the **release preparation branch**'s (i.e. hdf5_X_Y_Z) default configuration mode from development to production in [configure.ac](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/configure.ac). - - Find “Determine build mode” in [configure.ac](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/configure.ac). 
+ - Need to set option `HDF5_GENERATE_HEADERS` to `OFF`, currently in line 996 of [src/CMakeLists.txt][u11]. + - Change the **release preparation branch**'s (i.e. hdf5_X_Y_Z) default configuration mode from development to production in [configure.ac][u12]. + - Find "Determine build mode" in [configure.ac][u12]. - Change `default=debug` to `default=production` at the bottom of the `AS_HELP_STRING` for `--enable-build-mode`. - Under `if test "X-$BUILD_MODE" = X- ; then` change `BUILD_MODE=debug` to `BUILD_MODE=production`. - Run `sh ./autogen.sh` to regenerate the UNIX build system files and commit the changes. (use `git status --ignored` to see the changes and `git add -f` to add all files. First delete any new files not to be committed, notably `src/H5public.h~` and `autom4te.cache/`.) @@ -114,7 +114,7 @@ For more information on the HDF5 versioning and backward and forward compatibili 7. Choose the release branch 8. Change ‘Release version tag’ name to 'hdf5_X.Y.Z.P' - P is some pre-release number. -9. Send a message to the HDF forum indicating that a pre-release source package is available for testing at and that feedback from the user community on their test results is being accepted. +9. Send a message to the HDF forum indicating that a pre-release source package is available for testing at /{hdf5-X.Y.Z-P}> and that feedback from the user community on their test results is being accepted. 10. Contact paying clients who are interested in testing the pre-release source package and inform them that it is available for testing and that feedback on their test results of the pre-release is appreciated. 11. This should be automated and currently github binaries are not signed. - Follow the [How to sign binaries with digital certificates(this is missing)]() work instructions to sign each Windows and Mac binary package with a digital certificate. @@ -137,7 +137,7 @@ For more information on the HDF5 versioning and backward and forward compatibili ### 8.
Finalize Release Notes (Release Manager) 1. Perform a final review of release notes and ensure that any new changes made to the source, any new known issues discovered, and any additional tests run since the code freeze have been reflected in RELEASE.txt and other appropriate in-source documentation files (INSTALL_*, etc.). (Refer to the sub-steps of step 3 for what to check). -2. Update the [RELEASE.txt](https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/release_docs/RELEASE.txt) in the **support** branch (i.e. hdf5_X_Y) to remove entries in “Bugs fixed” and “New Features” sections and increment the version number for the following release (“Bug fixes since X.Y.Z” - occurs twice). +2. Update the [RELEASE.txt][u1] in the **support** branch (i.e. hdf5_X_Y) to remove entries in "Bugs fixed" and "New Features" sections and increment the version number for the following release ("Bug fixes since X.Y.Z" - occurs twice). - `$ git checkout hdf5_X_Y` - `$ vi RELEASE.txt # update RELEASE.txt to clear it out` - `$ git commit -m "Reset RELEASE.txt in preparation for the next release."` @@ -161,3 +161,19 @@ For more information on the HDF5 versioning and backward and forward compatibili ### 11. Conduct Release Retrospective (Release Manager) 1. Schedule time and solicit comments from retrospective 2. 
Identify issues and document them + +[u1]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/release_docs/RELEASE.txt +[u2]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/README.md +[u3]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/COPYING +[u4]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/release_docs +[u5]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/release_docs/INSTALL +[u6]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/release_docs/INSTALL_Autotools.txt +[u7]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/release_docs/INSTALL_CMake.txt +[u8]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/.github/workflows/release.yml +[u9]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/config/lt_vers.am +[u10]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/bin/h5vers +[u11]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/src/CMakeLists.txt +[u12]: https://github.com/HDFGroup/hdf5/blob/hdf5_1_14/configure.ac +[u13]: https://support.hdfgroup.org/documentation/hdf5/v1_14/v1_14_4/api-compat-macros.html +[u14]: https://github.com/HDFGroup/hdf5/releases/tag/snapshot-1.14 +[u15]: https://github.com/HDFGroup/hdf5/releases/tag/snapshot diff --git a/release_docs/USING_CMake_Examples.txt b/release_docs/USING_CMake_Examples.txt index 5e7c66bc13a..92c8ef5461e 100644 --- a/release_docs/USING_CMake_Examples.txt +++ b/release_docs/USING_CMake_Examples.txt @@ -78,12 +78,11 @@ III. 
Defaults in the HDF5_Examples_options.cmake file #### DEFAULT: ### #### BUILD_SHARED_LIBS:BOOL=OFF ### -#### HDF_BUILD_C:BOOL=ON ### -#### HDF_BUILD_CXX:BOOL=OFF ### -#### HDF_BUILD_FORTRAN:BOOL=OFF ### -#### HDF_BUILD_JAVA:BOOL=OFF ### -#### HDF_BUILD_FILTERS:BOOL=OFF ### -#### BUILD_TESTING:BOOL=OFF ### -#### HDF_ENABLE_PARALLEL:BOOL=OFF ### -#### HDF_ENABLE_THREADSAFE:BOOL=OFF ### +#### H5EX_BUILD_C:BOOL=ON ### +#### H5EX_BUILD_CXX:BOOL=OFF ### +#### H5EX_BUILD_FORTRAN:BOOL=OFF ### +#### H5EX_BUILD_JAVA:BOOL=OFF ### +#### H5EX_BUILD_FILTERS:BOOL=OFF ### +#### H5EX_BUILD_TESTING:BOOL=OFF ### +#### H5EX_ENABLE_PARALLEL:BOOL=OFF ### diff --git a/src/H5Amodule.h b/src/H5Amodule.h index 18fabe56f58..42715535367 100644 --- a/src/H5Amodule.h +++ b/src/H5Amodule.h @@ -59,7 +59,7 @@ * attached directly to that object * * \subsection subsec_error_H5A Attribute Function Summaries - * @see H5A reference manual + * see @ref H5A reference manual * * \subsection subsec_attribute_program Programming Model for Attributes * @@ -98,26 +98,6 @@ * \li Close the attribute * \li Close the primary data object (if appropriate) * - * - * - * - * - * - * - * - * - * - * - *
    CreateUpdate
    - * \snippet{lineno} H5A_examples.c create - * - * \snippet{lineno} H5A_examples.c update - *
    ReadDelete
    - * \snippet{lineno} H5A_examples.c read - * - * \snippet{lineno} H5A_examples.c delete - *
    - * * \subsection subsec_attribute_work Working with Attributes * * \subsubsection subsubsec_attribute_work_struct The Structure of an Attribute @@ -376,7 +356,7 @@ * An HDF5 attribute is a small metadata object describing the nature and/or intended usage of a primary data * object. A primary data object may be a dataset, group, or committed datatype. * - * @see sec_attribute + * @see \ref sec_attribute * */ diff --git a/src/H5Cpkg.h b/src/H5Cpkg.h index 208944e1053..6a636aee76c 100644 --- a/src/H5Cpkg.h +++ b/src/H5Cpkg.h @@ -1912,7 +1912,7 @@ typedef struct H5C_tag_info_t { * the hash table. Note that the index_size field (above) * is also the sum of the sizes of all entries in the cache. * Thus we should have the invariant that clean_index_size + - * dirty_index_size == index_size. + * dirty_index_size = index_size. * * WARNING: * The value of the clean_index_size must not be mistaken for @@ -1929,7 +1929,7 @@ typedef struct H5C_tag_info_t { * the hash table. Note that the index_size field (above) * is also the sum of the sizes of all entries in the cache. * Thus we should have the invariant that clean_index_size + - * dirty_index_size == index_size. + * dirty_index_size = index_size. * * dirty_index_ring_size: Array of size_t of length H5C_RING_NTYPES used to * maintain the sum of the sizes of all dirty entries in the @@ -2025,12 +2025,12 @@ typedef struct H5C_tag_info_t { * The cost of maintaining the skip list is significant. As it is only used * on flush and close, it is maintained only when needed. * - * To do this, we add a flag to control maintenanace of the skip list. + * To do this, we add a flag to control maintenance of the skip list. * This flag is initially set to false, which disables all operations * on the skip list. 
* * At the beginning of either flush or close, we scan the index list, - * insert all dirtly entries in the skip list, and enable operations + * insert all dirty entries in the skip list, and enable operations * on skip list by setting above control flag to true. * * In the case of a partial flush (i.e. flush tagged entries), we only diff --git a/src/H5Cprivate.h b/src/H5Cprivate.h index 9f123123bc6..d7065799a67 100644 --- a/src/H5Cprivate.h +++ b/src/H5Cprivate.h @@ -903,7 +903,7 @@ typedef herr_t (*H5C_log_flush_func_t)(H5C_t *cache_ptr, haddr_t addr, bool was_ * * Note that flush dependencies are used to order flushes within rings. * - * Note also that at the conceptual level, rings are argueably superfluous, + * Note also that at the conceptual level, rings are arguably superfluous, * as a similar effect could be obtained via the flush dependency mechanism. * However, this would require all entries in the cache to participate in a * flush dependency -- with the implied setup and takedown overhead and diff --git a/src/H5Dmodule.h b/src/H5Dmodule.h index 26e748ce1a0..96c5b1a704e 100644 --- a/src/H5Dmodule.h +++ b/src/H5Dmodule.h @@ -887,7 +887,7 @@ filter. * It is clear that the internal HDF5 filter mechanism, while extensible, does not work well with third-party * filters. It would be a maintenance nightmare to keep adding and supporting new compression methods * in HDF5. For any set of HDF5 “internal” filters, there always will be data with which the “internal” -filters + * filters * will not achieve the optimal performance needed to address data I/O and storage problems. Thus the * internal HDF5 filter mechanism is enhanced to address the issues discussed above. 
* @@ -901,7 +901,7 @@ filters * * When an application reads data compressed with a third-party HDF5 filter, the HDF5 Library will search * for the required filter plugin, register the filter with the library (if the filter function is not -registered) and + * registered) and * apply it to the data on the read operation. * * For more information, @@ -1496,7 +1496,7 @@ allocated if necessary. * the size of the memory datatype and the number of elements in the memory selection. * * Variable-length data are organized in two or more areas of memory. For more information, - * \see \ref h4_vlen_datatype "Variable-length Datatypes". + * see \ref h4_vlen_datatype "Variable-length Datatypes". * * When writing data, the application creates an array of * vl_info_t which contains pointers to the elements. The elements might be, for example, strings. @@ -2735,7 +2735,7 @@ allocated if necessary. * See The HDF Group website for further information regarding the SZip filter. * * \subsubsection subsubsec_dataset_filters_dyn Using Dynamically-Loadable Filters - * \see \ref sec_filter_plugins for further information regarding the dynamically-loadable filters. + * see \ref sec_filter_plugins for further information regarding the dynamically-loadable filters. * * HDF has a filter plugin repository of useful third-party plugins that can used * diff --git a/src/H5Emodule.h b/src/H5Emodule.h index 307b5a7fac4..f46456a1369 100644 --- a/src/H5Emodule.h +++ b/src/H5Emodule.h @@ -58,7 +58,7 @@ * design for the Error Handling API. * * \subsection subsec_error_H5E Error Handling Function Summaries - * @see H5E reference manual + * see @ref H5E reference manual * * \subsection subsec_error_program Programming Model for Error Handling * This section is under construction. @@ -80,24 +80,21 @@ * an error stack ID is needed as a parameter, \ref H5E_DEFAULT can be used to indicate the library's default * stack. 
The first error record of the error stack, number #000, is produced by the API function itself and * is usually sufficient to indicate to the application what went wrong. - *
    - * - * - * - * - *
    Example: An Error Message
    - *

    If an application calls \ref H5Tclose on a - * predefined datatype then the following message is - * printed on the standard error stream. This is a - * simple error that has only one component, the API - * function; other errors may have many components. - *

    + *
    + * If an application calls \ref H5Tclose  on a
    + * predefined datatype then the following message is
    + * printed on the standard error stream.  This is a
    + * simple error that has only one component, the API
    + * function; other errors may have many components.
    + *
    + * An Error Message Example
    + * \code
      * HDF5-DIAG: Error detected in HDF5 (1.10.9) thread 0.
      *    #000: H5T.c line ### in H5Tclose(): predefined datatype
      *       major: Function argument
      *       minor: Bad value
    - *         
    - *
    + * \endcode + * * In the example above, we can see that an error record has a major message and a minor message. A major * message generally indicates where the error happens. The location can be a dataset or a dataspace, for * example. A minor message explains further details of the error. An example is “unable to open file”. @@ -158,15 +155,15 @@ * * Example: Turn off error messages while probing a function * \code - * *** Save old error handler *** + * // Save old error handler * H5E_auto2_t oldfunc; * void *old_client_data; * H5Eget_auto2(error_stack, &old_func, &old_client_data); - * *** Turn off error handling *** + * // Turn off error handling * H5Eset_auto2(error_stack, NULL, NULL); - * *** Probe. Likely to fail, but that's okay *** + * // Probe. Likely to fail, but that's okay * status = H5Fopen (......); - * *** Restore previous error handler *** + * // Restore previous error handler * H5Eset_auto2(error_stack, old_func, old_client_data); * \endcode * @@ -174,9 +171,9 @@ * * Example: Disable automatic printing and explicitly print error messages * \code - * *** Turn off error handling permanently *** + * // Turn off error handling permanently * H5Eset_auto2(error_stack, NULL, NULL); - * *** If failure, print error message *** + * // If failure, print error message * if (H5Fopen (....)<0) { * H5Eprint2(H5E_DEFAULT, stderr); * exit (1); @@ -243,9 +240,9 @@ * * The following example shows a user‐defined callback function. 
* - * Example: A user‐defined callback function + * A user‐defined callback function Example * \code - * \#define MSG_SIZE 64 + * #define MSG_SIZE 64 * herr_t * custom_print_cb(unsigned n, const H5E_error2_t *err_desc, void *client_data) * { @@ -255,7 +252,7 @@ * char cls[MSG_SIZE]; * const int indent = 4; * - * *** Get descriptions for the major and minor error numbers *** + * // Get descriptions for the major and minor error numbers * if(H5Eget_class_name(err_desc->cls_id, cls, MSG_SIZE) < 0) * TEST_ERROR; * if(H5Eget_msg(err_desc->maj_num, NULL, maj, MSG_SIZE) < 0) @@ -296,13 +293,11 @@ * to push its own error records onto the error stack once it declares an error class of its own through the * HDF5 Error API. * - * - * - * - * - * - *
    Example: An Error Report
    - *

    An error report shows both the library's error record and the application's error records. - * See the example below. - *

    + * An error report shows both the library's error record and the application's error records.
    + * See the example below.
    + *
    + * An Error Report Example
    + * \code
      * Error Test-DIAG: Error detected in Error Program (1.0)
      *         thread 8192:
      *     #000: ../../hdf5/test/error_test.c line ### in main():
    @@ -318,10 +313,8 @@
      *         not a dataset
      *       major: Invalid arguments to routine
      *       minor: Inappropriate type
    - *       
    - *
    + *\endcode + * * In the line above error record #002 in the example above, the starting phrase is HDF5. This is the error * class name of the HDF5 Library. All of the library's error messages (major and minor) are in this default * error class. The Error Test in the beginning of the line above error record #000 is the name of the @@ -334,7 +327,7 @@ * * Example: The user‐defined error handler * \code - * \#define MSG_SIZE 64 + * #define MSG_SIZE 64 * herr_t * custom_print_cb(unsigned n, const H5E_error2_t *err_desc, * void* client_data) @@ -345,7 +338,7 @@ * char cls[MSG_SIZE]; * const int indent = 4; * - * *** Get descriptions for the major and minor error numbers *** + * // Get descriptions for the major and minor error numbers * if(H5Eget_class_name(err_desc->cls_id, cls, MSG_SIZE) < 0) * TEST_ERROR; * if(H5Eget_msg(err_desc->maj_num, NULL, maj, MSG_SIZE) < 0) @@ -411,13 +404,13 @@ * * Example: Create an error class and error messages * \code - * *** Create an error class *** + * // Create an error class * class_id = H5Eregister_class(ERR_CLS_NAME, PROG_NAME, PROG_VERS); - * *** Retrieve class name *** + * // Retrieve class name * H5Eget_class_name(class_id, cls_name, cls_size); - * *** Create a major error message in the class *** + * // Create a major error message in the class * maj_id = H5Ecreate_msg(class_id, H5E_MAJOR, “... ...”); - * *** Create a minor error message in the class *** + * // Create a minor error message in the class * min_id = H5Ecreate_msg(class_id, H5E_MINOR, “... 
...”); * \endcode * @@ -486,14 +479,14 @@ * * Example: Pushing an error message to an error stack * \code - * *** Make call to HDF5 I/O routine *** + * // Make call to HDF5 I/O routine * if((dset_id=H5Dopen(file_id, dset_name, access_plist)) < 0) * { - * *** Push client error onto error stack *** + * // Push client error onto error stack * H5Epush(H5E_DEFAULT,__FILE__,FUNC,__LINE__,cls_id, * CLIENT_ERR_MAJ_IO,CLIENT_ERR_MINOR_OPEN, “H5Dopen failed”); * } - * *** Indicate error occurred in function *** + * // Indicate error occurred in function * return 0; * \endcode * @@ -504,15 +497,15 @@ * \code * if (H5Dwrite(dset_id, mem_type_id, mem_space_id, file_space_id, dset_xfer_plist_id, buf) < 0) * { - * *** Push client error onto error stack *** + * // Push client error onto error stack * H5Epush2(H5E_DEFAULT,__FILE__,FUNC,__LINE__,cls_id, * CLIENT_ERR_MAJ_IO,CLIENT_ERR_MINOR_HDF5, * “H5Dwrite failed”); - * *** Preserve the error stack by assigning an object handle to it *** + * // Preserve the error stack by assigning an object handle to it * error_stack = H5Eget_current_stack(); - * *** Close dataset *** + * // Close dataset * H5Dclose(dset_id); - * *** Replace the current error stack with the preserved one *** + * // Replace the current error stack with the preserved one * H5Eset_current_stack(error_stack); * } * return 0; @@ -545,7 +538,7 @@ * error stack. The error stack is statically allocated to reduce the * complexity of handling errors within the \ref H5E package. 
* - * @see sec_error + * @see \ref sec_error * */ diff --git a/src/H5FDfamily.c b/src/H5FDfamily.c index 642da8d0b66..335c63a94f1 100644 --- a/src/H5FDfamily.c +++ b/src/H5FDfamily.c @@ -30,14 +30,14 @@ #include "H5FDdrvr_module.h" /* This source code file is part of the H5FD driver module */ -#include "H5private.h" /* Generic Functions */ -#include "H5Eprivate.h" /* Error handling */ -#include "H5Fprivate.h" /* File access */ -#include "H5FDprivate.h" /* File drivers */ -#include "H5FDfamily.h" /* Family file driver */ -#include "H5Iprivate.h" /* IDs */ -#include "H5MMprivate.h" /* Memory management */ -#include "H5Pprivate.h" /* Property lists */ +#include "H5private.h" /* Generic Functions */ +#include "H5Eprivate.h" /* Error handling */ +#include "H5Fprivate.h" /* File access */ +#include "H5FDprivate.h" /* File drivers */ +#include "H5FDfamily.h" /* Family file driver */ +#include "H5Iprivate.h" /* IDs */ +#include "H5MMprivate.h" /* Memory management */ +#include "H5Pprivate.h" /* Property lists */ /* The size of the member name buffers */ #define H5FD_FAM_MEMB_NAME_BUF_SIZE 4096 @@ -187,10 +187,9 @@ H5FD__family_get_default_config(H5FD_family_fapl_t *fa_out) HGOTO_ERROR(H5E_VFL, H5E_CANTSET, FAIL, "can't set default driver on member FAPL"); done: - if (ret_value < 0 && fa_out->memb_fapl_id >= 0) { + if (ret_value < 0 && fa_out->memb_fapl_id >= 0) if (H5I_dec_ref(fa_out->memb_fapl_id) < 0) HDONE_ERROR(H5E_VFL, H5E_CANTDEC, FAIL, "can't decrement ref. count on member FAPL ID"); - } FUNC_LEAVE_NOAPI(ret_value) } /* end H5FD__family_get_default_config() */ @@ -237,6 +236,7 @@ H5FD__family_get_default_printf_filename(const char *old_filename) if (file_extension) { /* Insert the printf format between the filename and ".h5" extension. 
*/ intptr_t beginningLength = file_extension - old_filename; + snprintf(tmp_buffer, new_filename_len, "%.*s%s%s", (int)beginningLength, old_filename, suffix, ".h5"); } else { @@ -246,15 +246,15 @@ H5FD__family_get_default_printf_filename(const char *old_filename) file_extension = strrchr(old_filename, '.'); if (file_extension) { intptr_t beginningLength = file_extension - old_filename; + snprintf(tmp_buffer, new_filename_len, "%.*s%s%s", (int)beginningLength, old_filename, suffix, file_extension); } - else { + else /* If the filename doesn't contain an extension at all, just insert * the printf format at the end of the filename. */ snprintf(tmp_buffer, new_filename_len, "%s%s", old_filename, suffix); - } } ret_value = tmp_buffer; @@ -618,7 +618,7 @@ H5FD__family_sb_decode(H5FD_t *_file, const char H5_ATTR_UNUSED *name, const uns /* Check if member size from file access property is correct */ if (msize != file->pmem_size) - HGOTO_ERROR(H5E_FILE, H5E_BADVALUE, FAIL, + HGOTO_ERROR(H5E_VFL, H5E_BADVALUE, FAIL, "Family member size should be %lu. But the size from file access property is %lu", (unsigned long)msize, (unsigned long)file->pmem_size); @@ -724,16 +724,16 @@ H5FD__family_open(const char *name, unsigned flags, hid_t fapl_id, haddr_t maxad file->pmem_size = fa->memb_size; /* Member size passed in through property */ if (default_config && H5I_dec_ref(fa->memb_fapl_id) < 0) - HGOTO_ERROR(H5E_ID, H5E_CANTDEC, NULL, "can't decrement ref. count on member FAPL"); + HGOTO_ERROR(H5E_VFL, H5E_CANTDEC, NULL, "can't decrement ref. 
count on member FAPL"); } /* end else */ file->name = H5MM_strdup(name); file->flags = flags; /* Allocate space for the string buffers */ if (NULL == (memb_name = (char *)H5MM_malloc(H5FD_FAM_MEMB_NAME_BUF_SIZE))) - HGOTO_ERROR(H5E_FILE, H5E_CANTALLOC, NULL, "unable to allocate member name"); + HGOTO_ERROR(H5E_VFL, H5E_CANTALLOC, NULL, "unable to allocate member name"); if (NULL == (temp = (char *)H5MM_malloc(H5FD_FAM_MEMB_NAME_BUF_SIZE))) - HGOTO_ERROR(H5E_FILE, H5E_CANTALLOC, NULL, "unable to allocate temporary member name"); + HGOTO_ERROR(H5E_VFL, H5E_CANTALLOC, NULL, "unable to allocate temporary member name"); /* Check that names are unique */ snprintf(memb_name, H5FD_FAM_MEMB_NAME_BUF_SIZE, name, 0); @@ -746,7 +746,7 @@ H5FD__family_open(const char *name, unsigned flags, hid_t fapl_id, haddr_t maxad name = temp; } else - HGOTO_ERROR(H5E_FILE, H5E_FILEEXISTS, NULL, "file names not unique"); + HGOTO_ERROR(H5E_VFL, H5E_FILEEXISTS, NULL, "file names not unique"); } /* Open all the family members */ @@ -771,17 +771,14 @@ H5FD__family_open(const char *name, unsigned flags, hid_t fapl_id, haddr_t maxad * Allow H5F_ACC_CREAT only on the first family member. */ if (0 == file->nmembs) { - if (NULL == (file->memb[file->nmembs] = H5FDopen(memb_name, (0 == file->nmembs ? flags : t_flags), - file->memb_fapl_id, HADDR_UNDEF))) + if (H5FD_open(false, &file->memb[file->nmembs], memb_name, (0 == file->nmembs ? flags : t_flags), + file->memb_fapl_id, HADDR_UNDEF) < 0) HGOTO_ERROR(H5E_VFL, H5E_CANTOPENFILE, NULL, "unable to open member file"); } else { - H5E_PAUSE_ERRORS - { - file->memb[file->nmembs] = H5FDopen(memb_name, (0 == file->nmembs ? flags : t_flags), - file->memb_fapl_id, HADDR_UNDEF); - } - H5E_RESUME_ERRORS + if (H5FD_open(true, &file->memb[file->nmembs], memb_name, (0 == file->nmembs ? 
flags : t_flags), + file->memb_fapl_id, HADDR_UNDEF) < 0) + HGOTO_ERROR(H5E_VFL, H5E_CANTOPENFILE, NULL, "unable to open member file"); if (!file->memb[file->nmembs]) break; @@ -794,7 +791,7 @@ H5FD__family_open(const char *name, unsigned flags, hid_t fapl_id, haddr_t maxad * smaller than the size specified through H5Pset_fapl_family(). Update the actual * member size. */ - if ((eof = H5FDget_eof(file->memb[0], H5FD_MEM_DEFAULT))) + if ((eof = H5FD_get_eof(file->memb[0], H5FD_MEM_DEFAULT))) file->memb_size = eof; ret_value = (H5FD_t *)file; @@ -818,7 +815,7 @@ H5FD__family_open(const char *name, unsigned flags, hid_t fapl_id, haddr_t maxad if (H5FD_close(file->memb[u]) < 0) nerrors++; if (nerrors) - HGOTO_ERROR(H5E_FILE, H5E_CANTCLOSEFILE, NULL, "unable to close member files"); + HDONE_ERROR(H5E_FILE, H5E_CANTCLOSEFILE, NULL, "unable to close member files"); if (file->memb) H5MM_xfree(file->memb); @@ -906,7 +903,7 @@ H5FD__family_cmp(const H5FD_t *_f1, const H5FD_t *_f2) assert(f1->nmembs >= 1 && f1->memb[0]); assert(f2->nmembs >= 1 && f2->memb[0]); - ret_value = H5FDcmp(f1->memb[0], f2->memb[0]); + ret_value = H5FD_cmp(f1->memb[0], f2->memb[0]); FUNC_LEAVE_NOAPI(ret_value) } /* end H5FD__family_cmp() */ @@ -1000,7 +997,7 @@ H5FD__family_set_eoa(H5FD_t *_file, H5FD_mem_t type, haddr_t abs_eoa) /* Allocate space for the member name buffer */ if (NULL == (memb_name = (char *)H5MM_malloc(H5FD_FAM_MEMB_NAME_BUF_SIZE))) - HGOTO_ERROR(H5E_FILE, H5E_CANTALLOC, FAIL, "unable to allocate member name"); + HGOTO_ERROR(H5E_VFL, H5E_CANTALLOC, FAIL, "unable to allocate member name"); for (u = 0; addr || u < file->nmembs; u++) { @@ -1021,9 +1018,9 @@ H5FD__family_set_eoa(H5FD_t *_file, H5FD_mem_t type, haddr_t abs_eoa) file->nmembs = MAX(file->nmembs, u + 1); snprintf(memb_name, H5FD_FAM_MEMB_NAME_BUF_SIZE, file->name, u); H5_CHECK_OVERFLOW(file->memb_size, hsize_t, haddr_t); - if (NULL == (file->memb[u] = H5FDopen(memb_name, file->flags | H5F_ACC_CREAT, file->memb_fapl_id, - 
(haddr_t)file->memb_size))) - HGOTO_ERROR(H5E_FILE, H5E_CANTOPENFILE, FAIL, "unable to open member file"); + if (H5FD_open(false, &file->memb[u], memb_name, file->flags | H5F_ACC_CREAT, file->memb_fapl_id, + (haddr_t)file->memb_size) < 0) + HGOTO_ERROR(H5E_VFL, H5E_CANTOPENFILE, FAIL, "unable to open member file"); } /* end if */ /* Set the EOA marker for the member */ @@ -1031,12 +1028,12 @@ H5FD__family_set_eoa(H5FD_t *_file, H5FD_mem_t type, haddr_t abs_eoa) H5_CHECK_OVERFLOW(file->memb_size, hsize_t, haddr_t); if (addr > (haddr_t)file->memb_size) { if (H5FD_set_eoa(file->memb[u], type, ((haddr_t)file->memb_size - file->pub.base_addr)) < 0) - HGOTO_ERROR(H5E_FILE, H5E_CANTINIT, FAIL, "unable to set file eoa"); + HGOTO_ERROR(H5E_VFL, H5E_CANTINIT, FAIL, "unable to set file eoa"); addr -= file->memb_size; } /* end if */ else { if (H5FD_set_eoa(file->memb[u], type, (addr - file->pub.base_addr)) < 0) - HGOTO_ERROR(H5E_FILE, H5E_CANTINIT, FAIL, "unable to set file eoa"); + HGOTO_ERROR(H5E_VFL, H5E_CANTINIT, FAIL, "unable to set file eoa"); addr = 0; } /* end else */ } /* end for */ @@ -1126,12 +1123,12 @@ H5FD__family_get_handle(H5FD_t *_file, hid_t fapl, void **file_handle) /* Get the plist structure and family offset */ if (NULL == (plist = H5P_object_verify(fapl, H5P_FILE_ACCESS))) - HGOTO_ERROR(H5E_ID, H5E_BADID, FAIL, "can't find object for ID"); + HGOTO_ERROR(H5E_VFL, H5E_BADID, FAIL, "can't find object for ID"); if (H5P_get(plist, H5F_ACS_FAMILY_OFFSET_NAME, &offset) < 0) - HGOTO_ERROR(H5E_PLIST, H5E_CANTGET, FAIL, "can't get offset for family driver"); + HGOTO_ERROR(H5E_VFL, H5E_CANTGET, FAIL, "can't get offset for family driver"); if (offset > (file->memb_size * file->nmembs)) - HGOTO_ERROR(H5E_ID, H5E_BADID, FAIL, "offset is bigger than file size"); + HGOTO_ERROR(H5E_VFL, H5E_BADID, FAIL, "offset is bigger than file size"); memb = (int)(offset / file->memb_size); ret_value = H5FD_get_vfd_handle(file->memb[memb], fapl, file_handle); @@ -1192,8 +1189,8 @@ 
H5FD__family_read(H5FD_t *_file, H5FD_mem_t type, hid_t dxpl_id, haddr_t addr, s assert(u < file->nmembs); - if (H5FDread(file->memb[u], type, dxpl_id, sub, req, buf) < 0) - HGOTO_ERROR(H5E_IO, H5E_READERROR, FAIL, "member file read failed"); + if (H5FD_read(file->memb[u], type, sub, req, buf) < 0) + HGOTO_ERROR(H5E_VFL, H5E_READERROR, FAIL, "member file read failed"); addr += req; buf += req; @@ -1254,8 +1251,8 @@ H5FD__family_write(H5FD_t *_file, H5FD_mem_t type, hid_t dxpl_id, haddr_t addr, assert(u < file->nmembs); - if (H5FDwrite(file->memb[u], type, dxpl_id, sub, req, buf) < 0) - HGOTO_ERROR(H5E_IO, H5E_WRITEERROR, FAIL, "member file write failed"); + if (H5FD_write(file->memb[u], type, sub, req, buf) < 0) + HGOTO_ERROR(H5E_VFL, H5E_WRITEERROR, FAIL, "member file write failed"); addr += req; buf += req; @@ -1290,7 +1287,7 @@ H5FD__family_flush(H5FD_t *_file, hid_t H5_ATTR_UNUSED dxpl_id, bool closing) nerrors++; if (nerrors) - HGOTO_ERROR(H5E_IO, H5E_BADVALUE, FAIL, "unable to flush member files"); + HGOTO_ERROR(H5E_VFL, H5E_BADVALUE, FAIL, "unable to flush member files"); done: FUNC_LEAVE_NOAPI(ret_value) @@ -1321,7 +1318,7 @@ H5FD__family_truncate(H5FD_t *_file, hid_t H5_ATTR_UNUSED dxpl_id, bool closing) nerrors++; if (nerrors) - HGOTO_ERROR(H5E_IO, H5E_BADVALUE, FAIL, "unable to flush member files"); + HGOTO_ERROR(H5E_VFL, H5E_BADVALUE, FAIL, "unable to flush member files"); done: FUNC_LEAVE_NOAPI(ret_value) @@ -1360,12 +1357,12 @@ H5FD__family_lock(H5FD_t *_file, bool rw) if (u < file->nmembs) { unsigned v; /* Local index variable */ - for (v = 0; v < u; v++) { + for (v = 0; v < u; v++) if (H5FD_unlock(file->memb[v]) < 0) /* Push error, but keep going */ HDONE_ERROR(H5E_IO, H5E_CANTUNLOCKFILE, FAIL, "unable to unlock member files"); - } /* end for */ - HGOTO_ERROR(H5E_IO, H5E_CANTLOCKFILE, FAIL, "unable to lock member files"); + + HGOTO_ERROR(H5E_VFL, H5E_CANTLOCKFILE, FAIL, "unable to lock member files"); } /* end if */ done: @@ -1394,7 +1391,7 @@ 
H5FD__family_unlock(H5FD_t *_file) for (u = 0; u < file->nmembs; u++) if (file->memb[u]) if (H5FD_unlock(file->memb[u]) < 0) - HGOTO_ERROR(H5E_IO, H5E_CANTUNLOCKFILE, FAIL, "unable to unlock member files"); + HGOTO_ERROR(H5E_VFL, H5E_CANTUNLOCKFILE, FAIL, "unable to unlock member files"); done: FUNC_LEAVE_NOAPI(ret_value) diff --git a/src/H5FDsec2.c b/src/H5FDsec2.c index 0a623e9cb36..99ff8df6cfe 100644 --- a/src/H5FDsec2.c +++ b/src/H5FDsec2.c @@ -700,15 +700,16 @@ H5FD__sec2_read(H5FD_t *_file, H5FD_mem_t H5_ATTR_UNUSED type, hid_t H5_ATTR_UNU int myerrno = errno; time_t mytime = time(NULL); +#ifndef H5_HAVE_PREADWRITE offset = HDlseek(file->fd, 0, SEEK_CUR); +#endif HGOTO_ERROR(H5E_IO, H5E_READERROR, FAIL, "file read failed: time = %s, filename = '%s', file descriptor = %d, errno = %d, " - "error message = '%s', buf = %p, total read size = %llu, bytes this sub-read = %llu, " - "bytes actually read = %llu, offset = %llu", - ctime(&mytime), file->filename, file->fd, myerrno, strerror(myerrno), buf, - (unsigned long long)size, (unsigned long long)bytes_in, - (unsigned long long)bytes_read, (unsigned long long)offset); + "error message = '%s', buf = %p, total read size = %zu, bytes this sub-read = %llu, " + "offset = %llu", + ctime(&mytime), file->filename, file->fd, myerrno, strerror(myerrno), buf, size, + (unsigned long long)bytes_in, (unsigned long long)offset); } /* end if */ if (0 == bytes_read) { @@ -810,15 +811,16 @@ H5FD__sec2_write(H5FD_t *_file, H5FD_mem_t H5_ATTR_UNUSED type, hid_t H5_ATTR_UN int myerrno = errno; time_t mytime = time(NULL); +#ifndef H5_HAVE_PREADWRITE offset = HDlseek(file->fd, 0, SEEK_CUR); +#endif HGOTO_ERROR(H5E_IO, H5E_WRITEERROR, FAIL, "file write failed: time = %s, filename = '%s', file descriptor = %d, errno = %d, " - "error message = '%s', buf = %p, total write size = %llu, bytes this sub-write = " - "%llu, bytes actually written = %llu, offset = %llu", - ctime(&mytime), file->filename, file->fd, myerrno, strerror(myerrno), buf, - 
(unsigned long long)size, (unsigned long long)bytes_in, - (unsigned long long)bytes_wrote, (unsigned long long)offset); + "error message = '%s', buf = %p, total write size = %zu, bytes this sub-write = " + "%llu, offset = %llu", + ctime(&mytime), file->filename, file->fd, myerrno, strerror(myerrno), buf, size, + (unsigned long long)bytes_in, (unsigned long long)offset); } /* end if */ assert(bytes_wrote > 0); diff --git a/src/H5Fint.c b/src/H5Fint.c index e9817b13048..f653e0b71f0 100644 --- a/src/H5Fint.c +++ b/src/H5Fint.c @@ -1786,7 +1786,7 @@ H5F__check_if_using_file_locks(H5P_genplist_t *fapl, bool *use_file_locking, boo * s: the open succeeds with flags combination from both the first and second opens * * NOTE: If the 'try' flag is true, not opening the file with the - * "non-tentative" VFD 'open' call is not treated an error; SUCCEED is + * "non-tentative" VFD 'open' call is not treated as an error; SUCCEED is * returned, with the file ptr set to NULL. If 'try' is false, failing * the "non-tentative" VFD 'open' call generates an error. * @@ -1985,7 +1985,7 @@ H5F_open(bool try, H5F_t **_file, const char *name, unsigned flags, hid_t fcpl_i if ((ci_load || ci_write) && (flags & (H5F_ACC_SWMR_READ | H5F_ACC_SWMR_WRITE))) HGOTO_ERROR(H5E_FILE, H5E_UNSUPPORTED, FAIL, "can't have both SWMR and cache image"); - /* Retain the name the file was opened with */ + /* Retain the original filename. */ file->open_name = H5MM_xstrdup(name); /* Short cuts */ diff --git a/src/H5Fmodule.h b/src/H5Fmodule.h index c76839e8e28..15f912d88fe 100644 --- a/src/H5Fmodule.h +++ b/src/H5Fmodule.h @@ -43,7 +43,7 @@ * \li The use of low-level file drivers * * This chapter assumes an understanding of the material presented in the data model chapter. For - * more information, @see @ref sec_data_model. + * more information, see \ref sec_data_model. 
* * \subsection subsec_file_access_modes File Access Modes * There are two issues regarding file access: @@ -101,7 +101,7 @@ * a user-definable data block; the size of data address parameters; properties of the B-trees that are * used to manage the data in the file; and certain HDF5 Library versioning information. * - * For more information, @see @ref subsubsec_file_property_lists_props. + * For more information, see \ref subsubsec_file_property_lists_props. * * This section has a more detailed discussion of file creation properties. If you have no special * requirements for these file characteristics, you can simply specify #H5P_DEFAULT for the default @@ -112,7 +112,7 @@ * settings, and parallel I/O. Data alignment, metadata block and cache sizes, and data sieve buffer * size are factors in improving I/O performance. * - * For more information, @see @ref subsubsec_file_property_lists_access. + * For more information, see \ref subsubsec_file_property_lists_access. * * This section has a more detailed discussion of file access properties. If you have no special * requirements for these file access characteristics, you can simply specify #H5P_DEFAULT for the @@ -466,8 +466,9 @@ * remain valid. Each of these file identifiers must be released by calling #H5Fclose when it is no * longer needed. * - * For more information, @see @ref subsubsec_file_property_lists_access. - * For more information, @see @ref subsec_file_property_lists. + * For more information, see \ref subsubsec_file_property_lists_access. + * + * For more information, see \ref subsec_file_property_lists. * * \subsection subsec_file_closes Closing an HDF5 File * #H5Fclose both closes a file and releases the file identifier returned by #H5Fopen or #H5Fcreate. @@ -512,7 +513,7 @@ * information for every property list function is provided in the \ref H5P * section of the HDF5 Reference Manual. * - * For more information, @see @ref sec_plist. + * For more information, @see \ref sec_plist. 
* * \subsubsection subsubsec_file_property_lists_create Creating a Property List * If you do not wish to rely on the default file creation and access properties, you must first create @@ -594,7 +595,7 @@ * \subsubsection subsubsec_file_property_lists_access File Access Properties * This section discusses file access properties that are not related to the low-level file drivers. File * drivers are discussed separately later in this chapter. - * For more information, @see @ref subsec_file_alternate_drivers. + * For more information, @see \ref subsec_file_alternate_drivers. * * File access property lists control various aspects of file I/O and structure. * @@ -657,7 +658,7 @@ * * HDF5 employs an extremely flexible mechanism called the virtual file layer, or VFL, for file * I/O. A full understanding of the VFL is only necessary if you plan to write your own drivers - * @see \ref VFL in the HDF5 Technical Notes. + * see \ref VFL in the HDF5 Technical Notes. * * For our * purposes here, it is sufficient to know that the low-level drivers used for file I/O reside in the @@ -690,7 +691,7 @@ * * If an application requires a special-purpose low-level driver, the VFL provides a public API for * creating one. For more information on how to create a driver, - * @see @ref VFL in the HDF5 Technical Notes. + * see \ref VFL in the HDF5 Technical Notes. * * \subsubsection subsubsec_file_alternate_drivers_id Identifying the Previously‐used File Driver * When creating a new HDF5 file, no history exists, so the file driver must be specified if it is to be @@ -888,11 +889,11 @@ * * Additional parameters may be added to these functions in the future. * - * @see + * see * HDF5 File Image Operations * section for information on more advanced usage of the Memory file driver, and - * @see + * see * Modified Region Writes * section for information on how to set write operations so that only modified regions are written * to storage. @@ -1071,7 +1072,7 @@ * name is FILE. 
If the function does not find an existing file, it will create one. If it does find an * existing file, it will empty the file in preparation for a new set of data. The identifier for the * "new" file will be passed back to the application program. - * For more information, @see @ref subsec_file_access_modes. + * For more information, @see \ref subsec_file_access_modes. * * Creating a file with default creation and access properties * \code @@ -1183,7 +1184,7 @@ * Note: In the code example above, loc_id is the file identifier for File1, /B is the link path to the * group where File2 is mounted, child_id is the file identifier for File2, and plist_id is a property * list identifier. - * For more information, @see @ref sec_group. + * For more information, @see \ref sec_group. * * See the entries for #H5Fmount, #H5Funmount, and #H5Lcreate_external in the HDF5 Reference Manual. * diff --git a/src/H5Gdeprec.c b/src/H5Gdeprec.c index e86bc82c738..5f9ad63756e 100644 --- a/src/H5Gdeprec.c +++ b/src/H5Gdeprec.c @@ -204,7 +204,7 @@ H5Gcreate1(hid_t loc_id, const char *name, size_t size_hint) loc_params.obj_type = H5I_get_type(loc_id); /* get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, H5I_INVALID_HID, "invalid location identifier"); /* Create the group */ @@ -262,7 +262,7 @@ H5Gopen1(hid_t loc_id, const char *name) loc_params.obj_type = H5I_get_type(loc_id); /* get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, H5I_INVALID_HID, "invalid location identifier"); /* Open the group */ @@ -320,7 +320,7 @@ H5Glink(hid_t cur_loc_id, H5G_link_t type, const char *cur_name, const char *new new_loc_params.loc_data.loc_by_name.lapl_id = H5P_LINK_ACCESS_DEFAULT; /* Get the location object */ - if (NULL == (vol_obj = (H5VL_object_t 
*)H5I_object(cur_loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(cur_loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Construct a temporary VOL object */ @@ -351,7 +351,7 @@ H5Glink(hid_t cur_loc_id, H5G_link_t type, const char *cur_name, const char *new loc_params.obj_type = H5I_get_type(cur_loc_id); /* get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(cur_loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(cur_loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Set up VOL callback arguments */ @@ -409,9 +409,9 @@ H5Glink2(hid_t cur_loc_id, const char *cur_name, H5G_link_t type, hid_t new_loc_ new_loc_params.loc_data.loc_by_name.lapl_id = H5P_LINK_ACCESS_DEFAULT; /* Get the location objects */ - if (NULL == (vol_obj1 = (H5VL_object_t *)H5I_object(cur_loc_id))) + if (NULL == (vol_obj1 = H5VL_vol_object(cur_loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); - if (NULL == (vol_obj2 = (H5VL_object_t *)H5I_object(new_loc_id))) + if (NULL == (vol_obj2 = H5VL_vol_object(new_loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Set up VOL callback arguments */ @@ -443,7 +443,7 @@ H5Glink2(hid_t cur_loc_id, const char *cur_name, H5G_link_t type, hid_t new_loc_ loc_params.obj_type = H5I_get_type(new_loc_id); /* get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(new_loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(new_loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Set up VOL callback arguments */ @@ -493,7 +493,7 @@ H5Gmove(hid_t src_loc_id, const char *src_name, const char *dst_name) loc_params2.loc_data.loc_by_name.lapl_id = H5P_LINK_ACCESS_DEFAULT; /* get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(src_loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(src_loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, 
"invalid location identifier"); /* Move the link */ @@ -519,10 +519,35 @@ H5Gmove2(hid_t src_loc_id, const char *src_name, hid_t dst_loc_id, const char *d H5VL_loc_params_t loc_params1; H5VL_object_t *vol_obj2 = NULL; /* Object of dst_id */ H5VL_loc_params_t loc_params2; + H5I_type_t src_id_type = H5I_BADID, dst_id_type = H5I_BADID; herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_API(FAIL) + /* Check arguments */ + if (!src_name || !*src_name) + HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no current name specified"); + if (!dst_name || !*dst_name) + HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no destination name specified"); + + /* src and dst location IDs cannot both have the value of H5L_SAME_LOC */ + if (src_loc_id == H5L_SAME_LOC && dst_loc_id == H5L_SAME_LOC) + HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "current and destination should not both be H5L_SAME_LOC"); + + /* reset an ID in the case of H5L_SAME_LOC */ + if (src_loc_id == H5L_SAME_LOC) + src_loc_id = dst_loc_id; + else if (dst_loc_id == H5L_SAME_LOC) + dst_loc_id = src_loc_id; + + src_id_type = H5I_get_type(src_loc_id); + if (!(H5I_GROUP == src_id_type || H5I_FILE == src_id_type)) + HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "invalid group (or file) ID, src_loc_id"); + + dst_id_type = H5I_get_type(dst_loc_id); + if (!(H5I_GROUP == dst_id_type || H5I_FILE == dst_id_type)) + HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "invalid group (or file) ID, dst_loc_id"); + /* Set up collective metadata if appropriate */ if (H5CX_set_loc(dst_loc_id) < 0) HGOTO_ERROR(H5E_SYM, H5E_CANTSET, FAIL, "can't set collective metadata read info"); @@ -531,22 +556,20 @@ H5Gmove2(hid_t src_loc_id, const char *src_name, hid_t dst_loc_id, const char *d loc_params1.type = H5VL_OBJECT_BY_NAME; loc_params1.loc_data.loc_by_name.name = src_name; loc_params1.loc_data.loc_by_name.lapl_id = H5P_LINK_ACCESS_DEFAULT; - loc_params1.obj_type = H5I_get_type(src_loc_id); + loc_params1.obj_type = src_id_type; /* Set location parameter for 
destination object */ loc_params2.type = H5VL_OBJECT_BY_NAME; loc_params2.loc_data.loc_by_name.name = dst_name; loc_params2.loc_data.loc_by_name.lapl_id = H5P_LINK_ACCESS_DEFAULT; - loc_params2.obj_type = H5I_get_type(dst_loc_id); + loc_params2.obj_type = dst_id_type; - if (H5L_SAME_LOC != src_loc_id) - /* get the location object */ - if (NULL == (vol_obj1 = (H5VL_object_t *)H5I_object(src_loc_id))) - HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); - if (H5L_SAME_LOC != dst_loc_id) - /* get the location object */ - if (NULL == (vol_obj2 = (H5VL_object_t *)H5I_object(dst_loc_id))) - HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); + /* get the location object */ + if (NULL == (vol_obj1 = H5VL_vol_object(src_loc_id))) + HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); + /* get the location object */ + if (NULL == (vol_obj2 = H5VL_vol_object(dst_loc_id))) + HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Move the link */ if (H5VL_link_move(vol_obj1, &loc_params1, vol_obj2, &loc_params2, H5P_LINK_CREATE_DEFAULT, @@ -588,7 +611,7 @@ H5Gunlink(hid_t loc_id, const char *name) loc_params.loc_data.loc_by_name.lapl_id = H5P_LINK_ACCESS_DEFAULT; /* Get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Set up VOL callback arguments */ @@ -635,7 +658,7 @@ H5Gget_linkval(hid_t loc_id, const char *name, size_t size, char *buf /*out*/) loc_params.loc_data.loc_by_name.lapl_id = H5P_LINK_ACCESS_DEFAULT; /* Get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Set up VOL callback arguments */ @@ -719,8 +742,9 @@ H5Gset_comment(hid_t loc_id, const char *name, const 
char *comment) * * Note: Deprecated in favor of H5Oget_comment/H5Oget_comment_by_name * - * Return: Success: Number of characters in the comment. The value - * returned may be larger than the BUFSIZE argument. + * Return: Success: Number of characters in the comment, excluding the + * NULL terminator character. The value returned may be + * larger than the BUFSIZE argument. * * Failure: Negative * @@ -1166,7 +1190,7 @@ H5Gget_objname_by_idx(hid_t loc_id, hsize_t idx, char *name /*out*/, size_t size loc_params.obj_type = H5I_get_type(loc_id); /* Get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, (-1), "invalid location identifier"); /* Set up VOL callback arguments */ diff --git a/src/H5Gmodule.h b/src/H5Gmodule.h index a06d44cea75..49fc9ed9472 100644 --- a/src/H5Gmodule.h +++ b/src/H5Gmodule.h @@ -722,7 +722,7 @@ * *

    Mounting a File

    * An external link is a permanent connection between two files. A temporary connection can be set - * up with the #H5Fmount function. For more information, @see sec_file. + * up with the #H5Fmount function. For more information, @see \ref sec_file. * For more information, see the #H5Fmount function in the \ref RM. * * \subsubsection subsubsec_group_program_info Discovering Information about Objects diff --git a/src/H5Gnode.c b/src/H5Gnode.c index cc45f39b1d6..c89417750e2 100644 --- a/src/H5Gnode.c +++ b/src/H5Gnode.c @@ -288,7 +288,7 @@ H5G__node_create(H5F_t *f, H5B_ins_t H5_ATTR_UNUSED op, void *_lt_key, void H5_A assert(H5B_INS_FIRST == op); if (NULL == (sym = H5FL_CALLOC(H5G_node_t))) - HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "memory allocation failed"); + HGOTO_ERROR(H5E_SYM, H5E_CANTALLOC, FAIL, "memory allocation failed"); sym->node_size = H5G_NODE_SIZE(f); if (HADDR_UNDEF == (*addr_p = H5MF_alloc(f, H5FD_MEM_BTREE, (hsize_t)sym->node_size))) HGOTO_ERROR(H5E_SYM, H5E_CANTINIT, FAIL, "unable to allocate file space"); @@ -309,13 +309,12 @@ H5G__node_create(H5F_t *f, H5B_ins_t H5_ATTR_UNUSED op, void *_lt_key, void H5_A rt_key->offset = 0; done: - if (ret_value < 0) { + if (ret_value < 0) if (sym != NULL) { if (sym->entry != NULL) sym->entry = H5FL_SEQ_FREE(H5G_entry_t, sym->entry); sym = H5FL_FREE(H5G_node_t, sym); } /* end if */ - } /* end if */ FUNC_LEAVE_NOAPI(ret_value) } /* end H5G__node_create() */ @@ -472,7 +471,7 @@ H5G__node_found(H5F_t *f, haddr_t addr, const void H5_ATTR_UNUSED *_lt_key, bool * Load the symbol table node for exclusive access. */ if (NULL == (sn = (H5G_node_t *)H5AC_protect(f, H5AC_SNODE, addr, f, H5AC__READ_ONLY_FLAG))) - HGOTO_ERROR(H5E_SYM, H5E_CANTLOAD, FAIL, "unable to protect symbol table node"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, FAIL, "unable to protect symbol table node"); /* * Binary search. 
@@ -504,7 +503,7 @@ H5G__node_found(H5F_t *f, haddr_t addr, const void H5_ATTR_UNUSED *_lt_key, bool done: if (sn && H5AC_unprotect(f, H5AC_SNODE, addr, sn, H5AC__NO_FLAGS_SET) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to release symbol table node"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, FAIL, "unable to release symbol table node"); FUNC_LEAVE_NOAPI(ret_value) } /* end H5G__node_found() */ @@ -569,7 +568,7 @@ H5G__node_insert(H5F_t *f, haddr_t addr, void H5_ATTR_UNUSED *_lt_key, bool H5_A * Load the symbol node. */ if (NULL == (sn = (H5G_node_t *)H5AC_protect(f, H5AC_SNODE, addr, f, H5AC__NO_FLAGS_SET))) - HGOTO_ERROR(H5E_SYM, H5E_CANTLOAD, H5B_INS_ERROR, "unable to protect symbol table node"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, H5B_INS_ERROR, "unable to protect symbol table node"); /* * Where does the new symbol get inserted? We use a binary search. @@ -609,7 +608,7 @@ H5G__node_insert(H5F_t *f, haddr_t addr, void H5_ATTR_UNUSED *_lt_key, bool H5_A HGOTO_ERROR(H5E_SYM, H5E_CANTINIT, H5B_INS_ERROR, "unable to split symbol table node"); if (NULL == (snrt = (H5G_node_t *)H5AC_protect(f, H5AC_SNODE, *new_node_p, f, H5AC__NO_FLAGS_SET))) - HGOTO_ERROR(H5E_SYM, H5E_CANTLOAD, H5B_INS_ERROR, "unable to split symbol table node"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, H5B_INS_ERROR, "unable to split symbol table node"); H5MM_memcpy(snrt->entry, sn->entry + H5F_SYM_LEAF_K(f), H5F_SYM_LEAF_K(f) * sizeof(H5G_entry_t)); snrt->nsyms = H5F_SYM_LEAF_K(f); @@ -662,9 +661,9 @@ H5G__node_insert(H5F_t *f, haddr_t addr, void H5_ATTR_UNUSED *_lt_key, bool H5_A done: if (snrt && H5AC_unprotect(f, H5AC_SNODE, *new_node_p, snrt, snrt_flags) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, H5B_INS_ERROR, "unable to release symbol table node"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, H5B_INS_ERROR, "unable to release symbol table node"); if (sn && H5AC_unprotect(f, H5AC_SNODE, addr, sn, sn_flags) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, H5B_INS_ERROR, "unable to release 
symbol table node"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, H5B_INS_ERROR, "unable to release symbol table node"); FUNC_LEAVE_NOAPI(ret_value) } /* end H5G__node_insert() */ @@ -718,7 +717,7 @@ H5G__node_remove(H5F_t *f, haddr_t addr, void H5_ATTR_NDEBUG_UNUSED *_lt_key /*i /* Load the symbol table */ if (NULL == (sn = (H5G_node_t *)H5AC_protect(f, H5AC_SNODE, addr, f, H5AC__NO_FLAGS_SET))) - HGOTO_ERROR(H5E_SYM, H5E_CANTLOAD, H5B_INS_ERROR, "unable to protect symbol table node"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, H5B_INS_ERROR, "unable to protect symbol table node"); /* "Normal" removal of a single entry from the symbol table node */ if (udata->common.name != NULL) { @@ -860,7 +859,7 @@ H5G__node_remove(H5F_t *f, haddr_t addr, void H5_ATTR_NDEBUG_UNUSED *_lt_key /*i tmp_oloc.file = f; /* Reduce the link count for all entries in this node */ - for (idx = 0; idx < sn->nsyms; idx++) { + for (idx = 0; idx < sn->nsyms; idx++) if (!(H5G_CACHED_SLINK == sn->entry[idx].type)) { /* Decrement the reference count */ assert(H5_addr_defined(sn->entry[idx].header)); @@ -870,7 +869,6 @@ H5G__node_remove(H5F_t *f, haddr_t addr, void H5_ATTR_NDEBUG_UNUSED *_lt_key /*i HGOTO_ERROR(H5E_SYM, H5E_CANTDELETE, H5B_INS_ERROR, "unable to decrement object link count"); } /* end if */ - } /* end for */ /* * We are about to remove all the symbols in this node. Free this @@ -919,7 +917,7 @@ H5G__node_iterate(H5F_t *f, const void H5_ATTR_UNUSED *_lt_key, haddr_t addr, /* Protect the symbol table node & local heap while we iterate over entries */ if (NULL == (sn = (H5G_node_t *)H5AC_protect(f, H5AC_SNODE, addr, f, H5AC__READ_ONLY_FLAG))) - HGOTO_ERROR(H5E_SYM, H5E_CANTLOAD, H5_ITER_ERROR, "unable to load symbol table node"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, H5_ITER_ERROR, "unable to load symbol table node"); /* * Iterate over the symbol table node entries. 
@@ -954,7 +952,7 @@ H5G__node_iterate(H5F_t *f, const void H5_ATTR_UNUSED *_lt_key, haddr_t addr, done: /* Release resources */ if (sn && H5AC_unprotect(f, H5AC_SNODE, addr, sn, H5AC__NO_FLAGS_SET) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, H5_ITER_ERROR, "unable to release object header"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, H5_ITER_ERROR, "unable to release object header"); FUNC_LEAVE_NOAPI(ret_value) } /* end H5G__node_iterate() */ @@ -988,13 +986,13 @@ H5G__node_sumup(H5F_t *f, const void H5_ATTR_UNUSED *_lt_key, haddr_t addr, /* Find the object node and add the number of symbol entries. */ if (NULL == (sn = (H5G_node_t *)H5AC_protect(f, H5AC_SNODE, addr, f, H5AC__READ_ONLY_FLAG))) - HGOTO_ERROR(H5E_SYM, H5E_CANTLOAD, H5_ITER_ERROR, "unable to load symbol table node"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, H5_ITER_ERROR, "unable to load symbol table node"); *num_objs += sn->nsyms; done: if (sn && H5AC_unprotect(f, H5AC_SNODE, addr, sn, H5AC__NO_FLAGS_SET) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, H5_ITER_ERROR, "unable to release object header"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, H5_ITER_ERROR, "unable to release object header"); FUNC_LEAVE_NOAPI(ret_value) } /* end H5G__node_sumup() */ @@ -1029,7 +1027,7 @@ H5G__node_by_idx(H5F_t *f, const void H5_ATTR_UNUSED *_lt_key, haddr_t addr, /* Get a pointer to the symbol table node */ if (NULL == (sn = (H5G_node_t *)H5AC_protect(f, H5AC_SNODE, addr, f, H5AC__READ_ONLY_FLAG))) - HGOTO_ERROR(H5E_SYM, H5E_CANTLOAD, H5_ITER_ERROR, "unable to load symbol table node"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, H5_ITER_ERROR, "unable to load symbol table node"); /* Find the node, locate the object symbol table entry and retrieve the name */ if (udata->idx >= udata->num_objs && udata->idx < (udata->num_objs + sn->nsyms)) { @@ -1051,7 +1049,7 @@ H5G__node_by_idx(H5F_t *f, const void H5_ATTR_UNUSED *_lt_key, haddr_t addr, done: if (sn && H5AC_unprotect(f, H5AC_SNODE, addr, sn, H5AC__NO_FLAGS_SET) < 0) - 
HDONE_ERROR(H5E_SYM, H5E_PROTECT, H5_ITER_ERROR, "unable to release object header"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, H5_ITER_ERROR, "unable to release object header"); FUNC_LEAVE_NOAPI(ret_value) } /* end H5G__node_by_idx() */ @@ -1084,14 +1082,14 @@ H5G__node_init(H5F_t *f) /* Allocate & initialize global info for the shared structure */ if (NULL == (shared = H5B_shared_new(f, H5B_SNODE, sizeof_rkey))) - HGOTO_ERROR(H5E_BTREE, H5E_NOSPACE, FAIL, "memory allocation failed for shared B-tree info"); + HGOTO_ERROR(H5E_SYM, H5E_CANTINIT, FAIL, "memory allocation failed for shared B-tree info"); /* Set up the "local" information for this file's groups */ /* */ /* Make shared B-tree info reference counted */ if (H5F_SET_GRP_BTREE_SHARED(f, H5UC_create(shared, H5B_shared_free)) < 0) - HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, FAIL, "can't create ref-count wrapper for shared B-tree info"); + HGOTO_ERROR(H5E_SYM, H5E_CANTINIT, FAIL, "can't create ref-count wrapper for shared B-tree info"); done: FUNC_LEAVE_NOAPI(ret_value) @@ -1155,7 +1153,7 @@ H5G__node_copy(H5F_t *f, const void H5_ATTR_UNUSED *_lt_key, haddr_t addr, const /* load the symbol table into memory from the source file */ if (NULL == (sn = (H5G_node_t *)H5AC_protect(f, H5AC_SNODE, addr, f, H5AC__READ_ONLY_FLAG))) - HGOTO_ERROR(H5E_SYM, H5E_CANTLOAD, H5_ITER_ERROR, "unable to load symbol table node"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, H5_ITER_ERROR, "unable to load symbol table node"); /* copy object in this node one by one */ for (i = 0; i < sn->nsyms; i++) { @@ -1223,7 +1221,7 @@ H5G__node_copy(H5F_t *f, const void H5_ATTR_UNUSED *_lt_key, haddr_t addr, const /* Copy the shared object from source to destination */ if (H5O_copy_header_map(&tmp_src_oloc, &new_dst_oloc, cpy_info, true, &obj_type, (void **)&cpy_udata) < 0) - HGOTO_ERROR(H5E_OHDR, H5E_CANTCOPY, H5_ITER_ERROR, "unable to copy object"); + HGOTO_ERROR(H5E_SYM, H5E_CANTCOPY, H5_ITER_ERROR, "unable to copy object"); /* Set up object creation 
info for symbol table insertion. Only * case so far is for inserting old-style groups (for caching stab @@ -1247,7 +1245,7 @@ H5G__node_copy(H5F_t *f, const void H5_ATTR_UNUSED *_lt_key, haddr_t addr, const lnk.type = H5L_TYPE_SOFT; if ((lnk.u.soft.name = (char *)H5HL_offset_into(udata->src_heap, src_ent->cache.slink.lval_offset)) == NULL) - HGOTO_ERROR(H5E_OHDR, H5E_CANTGET, H5_ITER_ERROR, "unable to get link name"); + HGOTO_ERROR(H5E_SYM, H5E_CANTGET, H5_ITER_ERROR, "unable to get link name"); /* Sanity check soft link name, to detect running off the end of the heap block */ max_link_len = udata->src_block_size - src_ent->cache.slink.lval_offset; @@ -1288,7 +1286,7 @@ H5G__node_copy(H5F_t *f, const void H5_ATTR_UNUSED *_lt_key, haddr_t addr, const done: if (sn && H5AC_unprotect(f, H5AC_SNODE, addr, sn, H5AC__NO_FLAGS_SET) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, H5_ITER_ERROR, "unable to release object header"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, H5_ITER_ERROR, "unable to release object header"); FUNC_LEAVE_NOAPI(ret_value) } /* end H5G__node_copy() */ @@ -1325,7 +1323,7 @@ H5G__node_build_table(H5F_t *f, const void H5_ATTR_UNUSED *_lt_key, haddr_t addr * because we're about to call an application function. 
*/ if (NULL == (sn = (H5G_node_t *)H5AC_protect(f, H5AC_SNODE, addr, f, H5AC__READ_ONLY_FLAG))) - HGOTO_ERROR(H5E_SYM, H5E_CANTLOAD, H5_ITER_ERROR, "unable to load symbol table node"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, H5_ITER_ERROR, "unable to load symbol table node"); /* Check if the link table needs to be extended */ if ((udata->ltable->nlinks + sn->nsyms) >= udata->alloc_nlinks) { @@ -1335,7 +1333,7 @@ H5G__node_build_table(H5F_t *f, const void H5_ATTR_UNUSED *_lt_key, haddr_t addr /* Re-allocate the link table */ if (NULL == (x = (H5O_link_t *)H5MM_realloc(udata->ltable->lnks, sizeof(H5O_link_t) * na))) - HGOTO_ERROR(H5E_RESOURCE, H5E_NOSPACE, H5_ITER_ERROR, "memory allocation failed"); + HGOTO_ERROR(H5E_SYM, H5E_CANTALLOC, H5_ITER_ERROR, "memory allocation failed"); udata->ltable->lnks = x; } /* end if */ @@ -1355,7 +1353,7 @@ H5G__node_build_table(H5F_t *f, const void H5_ATTR_UNUSED *_lt_key, haddr_t addr done: /* Release the locked items */ if (sn && H5AC_unprotect(f, H5AC_SNODE, addr, sn, H5AC__NO_FLAGS_SET) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, H5_ITER_ERROR, "unable to release object header"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, H5_ITER_ERROR, "unable to release object header"); FUNC_LEAVE_NOAPI(ret_value) } /* end H5G__node_build_table() */ @@ -1400,9 +1398,8 @@ H5G__node_iterate_size(H5F_t *f, const void H5_ATTR_UNUSED *_lt_key, haddr_t H5_ herr_t H5G_node_debug(H5F_t *f, haddr_t addr, FILE *stream, int indent, int fwidth, haddr_t heap_addr) { - H5G_node_t *sn = NULL; - H5HL_t *heap = NULL; - unsigned u; /* Local index variable */ + H5G_node_t *sn = NULL; + H5HL_t *heap = NULL; herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_NOAPI(FAIL) @@ -1419,22 +1416,17 @@ H5G_node_debug(H5F_t *f, haddr_t addr, FILE *stream, int indent, int fwidth, had /* Pin the heap down in memory */ if (heap_addr > 0 && H5_addr_defined(heap_addr)) if (NULL == (heap = H5HL_protect(f, heap_addr, H5AC__READ_ONLY_FLAG))) - HGOTO_ERROR(H5E_SYM, H5E_CANTLOAD, 
FAIL, "unable to protect symbol table heap"); + HGOTO_ERROR(H5E_SYM, H5E_CANTPROTECT, FAIL, "unable to protect symbol table heap"); - /* - * If we couldn't load the symbol table node, then try loading the - * B-tree node. - */ - if (NULL == (sn = (H5G_node_t *)H5AC_protect(f, H5AC_SNODE, addr, f, H5AC__READ_ONLY_FLAG))) { - H5G_bt_common_t udata; /*data to pass through B-tree */ + /* Try loading symbol table node */ + H5E_PAUSE_ERRORS + { + sn = (H5G_node_t *)H5AC_protect(f, H5AC_SNODE, addr, f, H5AC__READ_ONLY_FLAG); + } + H5E_RESUME_ERRORS + if (sn) { + unsigned u; /* Local index variable */ - H5E_clear_stack(); /* discard that error */ - udata.heap = heap; - udata.block_size = H5HL_heap_get_size(heap); - if (H5B_debug(f, addr, stream, indent, fwidth, H5B_SNODE, &udata) < 0) - HGOTO_ERROR(H5E_SYM, H5E_CANTLOAD, FAIL, "unable to debug B-tree node"); - } /* end if */ - else { fprintf(stream, "%*sSymbol Table Node...\n", indent, ""); fprintf(stream, "%*s%-*s %s\n", indent, "", fwidth, "Dirty:", sn->cache_info.is_dirty ? "Yes" : "No"); fprintf(stream, "%*s%-*s %u\n", indent, "", fwidth, @@ -1460,12 +1452,24 @@ H5G_node_debug(H5F_t *f, haddr_t addr, FILE *stream, int indent, int fwidth, had H5G__ent_debug(sn->entry + u, stream, indent, fwidth, heap); } /* end for */ } /* end if */ + /* + * If we couldn't load the symbol table node, then try loading the + * B-tree node. 
+ */ + else { + H5G_bt_common_t udata; /*data to pass through B-tree */ + + udata.heap = heap; + udata.block_size = H5HL_heap_get_size(heap); + if (H5B_debug(f, addr, stream, indent, fwidth, H5B_SNODE, &udata) < 0) + HGOTO_ERROR(H5E_SYM, H5E_BADVALUE, FAIL, "unable to debug B-tree node"); + } /* end else */ done: if (sn && H5AC_unprotect(f, H5AC_SNODE, addr, sn, H5AC__NO_FLAGS_SET) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to release symbol table node"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, FAIL, "unable to release symbol table node"); if (heap && H5HL_unprotect(heap) < 0) - HDONE_ERROR(H5E_SYM, H5E_PROTECT, FAIL, "unable to unprotect symbol table heap"); + HDONE_ERROR(H5E_SYM, H5E_CANTUNPROTECT, FAIL, "unable to unprotect symbol table heap"); FUNC_LEAVE_NOAPI(ret_value) } /* end H5G_node_debug() */ diff --git a/src/H5L.c b/src/H5L.c index cbc584f6b44..3616cb75a59 100644 --- a/src/H5L.c +++ b/src/H5L.c @@ -94,7 +94,8 @@ H5Lmove(hid_t src_loc_id, const char *src_name, hid_t dst_loc_id, const char *ds H5VL_object_t *vol_obj2 = NULL; /* Object of dst_id */ H5VL_loc_params_t loc_params1; H5VL_loc_params_t loc_params2; - H5VL_object_t tmp_vol_obj; /* Temporary object */ + H5VL_object_t tmp_vol_obj; /* Temporary object */ + H5I_type_t src_id_type = H5I_BADID, dst_id_type = H5I_BADID; herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_API(FAIL) @@ -106,6 +107,21 @@ H5Lmove(hid_t src_loc_id, const char *src_name, hid_t dst_loc_id, const char *ds HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no current name specified"); if (!dst_name || !*dst_name) HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no destination name specified"); + + /* reset an ID in the case of H5L_SAME_LOC */ + if (src_loc_id == H5L_SAME_LOC) + src_loc_id = dst_loc_id; + else if (dst_loc_id == H5L_SAME_LOC) + dst_loc_id = src_loc_id; + + /* verify that src and dst IDs are either a file or a group ID */ + src_id_type = H5I_get_type(src_loc_id); + if (!(H5I_GROUP == src_id_type || H5I_FILE 
== src_id_type)) + HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "invalid group (or file) ID, src_loc_id"); + dst_id_type = H5I_get_type(dst_loc_id); + if (!(H5I_GROUP == dst_id_type || H5I_FILE == dst_id_type)) + HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "invalid group (or file) ID, dst_loc_id"); + if (lcpl_id != H5P_DEFAULT && (true != H5P_isa_class(lcpl_id, H5P_LINK_CREATE))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a link creation property list"); @@ -117,30 +133,27 @@ H5Lmove(hid_t src_loc_id, const char *src_name, hid_t dst_loc_id, const char *ds H5CX_set_lcpl(lcpl_id); /* Verify access property list and set up collective metadata if appropriate */ - if (H5CX_set_apl(&lapl_id, H5P_CLS_LACC, ((src_loc_id != H5L_SAME_LOC) ? src_loc_id : dst_loc_id), true) < - 0) + if (H5CX_set_apl(&lapl_id, H5P_CLS_LACC, dst_loc_id, true) < 0) HGOTO_ERROR(H5E_LINK, H5E_CANTSET, FAIL, "can't set access property list info"); /* Set location parameter for source object */ loc_params1.type = H5VL_OBJECT_BY_NAME; loc_params1.loc_data.loc_by_name.name = src_name; loc_params1.loc_data.loc_by_name.lapl_id = lapl_id; - loc_params1.obj_type = H5I_get_type(src_loc_id); + loc_params1.obj_type = src_id_type; /* Set location parameter for destination object */ loc_params2.type = H5VL_OBJECT_BY_NAME; loc_params2.loc_data.loc_by_name.name = dst_name; loc_params2.loc_data.loc_by_name.lapl_id = lapl_id; - loc_params2.obj_type = H5I_get_type(dst_loc_id); + loc_params2.obj_type = dst_id_type; - if (H5L_SAME_LOC != src_loc_id) - /* Get the location object */ - if (NULL == (vol_obj1 = H5VL_vol_object(src_loc_id))) - HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); - if (H5L_SAME_LOC != dst_loc_id) - /* Get the location object */ - if (NULL == (vol_obj2 = H5VL_vol_object(dst_loc_id))) - HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); + /* Get the location object */ + if (NULL == (vol_obj1 = H5VL_vol_object(src_loc_id))) + HGOTO_ERROR(H5E_ARGS, 
H5E_BADTYPE, FAIL, "invalid location identifier"); + /* Get the location object */ + if (NULL == (vol_obj2 = H5VL_vol_object(dst_loc_id))) + HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Make sure that the VOL connectors are the same */ if (vol_obj1 && vol_obj2) { @@ -195,7 +208,8 @@ H5Lcopy(hid_t src_loc_id, const char *src_name, hid_t dst_loc_id, const char *ds H5VL_loc_params_t loc_params1; H5VL_object_t *vol_obj2 = NULL; /* Object of dst_id */ H5VL_loc_params_t loc_params2; - H5VL_object_t tmp_vol_obj; /* Temporary object */ + H5VL_object_t tmp_vol_obj; /* Temporary object */ + H5I_type_t src_id_type = H5I_BADID, dst_id_type = H5I_BADID; herr_t ret_value = SUCCEED; /* Return value */ FUNC_ENTER_API(FAIL) @@ -210,6 +224,20 @@ H5Lcopy(hid_t src_loc_id, const char *src_name, hid_t dst_loc_id, const char *ds if (lcpl_id != H5P_DEFAULT && (true != H5P_isa_class(lcpl_id, H5P_LINK_CREATE))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "not a link creation property list"); + /* reset an ID in the case of H5L_SAME_LOC */ + if (src_loc_id == H5L_SAME_LOC) + src_loc_id = dst_loc_id; + else if (dst_loc_id == H5L_SAME_LOC) + dst_loc_id = src_loc_id; + + /* verify that src and dst IDs are either a file or a group ID */ + src_id_type = H5I_get_type(src_loc_id); + if (!(H5I_GROUP == src_id_type || H5I_FILE == src_id_type) && src_loc_id != H5L_SAME_LOC) + HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "invalid group (or file) ID, src_loc_id"); + dst_id_type = H5I_get_type(dst_loc_id); + if (!(H5I_GROUP == dst_id_type || H5I_FILE == dst_id_type) && dst_loc_id != H5L_SAME_LOC) + HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "invalid group (or file) ID, dst_loc_id"); + /* Check the link create property list */ if (H5P_DEFAULT == lcpl_id) lcpl_id = H5P_LINK_CREATE_DEFAULT; @@ -226,22 +254,20 @@ H5Lcopy(hid_t src_loc_id, const char *src_name, hid_t dst_loc_id, const char *ds loc_params1.type = H5VL_OBJECT_BY_NAME; loc_params1.loc_data.loc_by_name.name = src_name; 
loc_params1.loc_data.loc_by_name.lapl_id = lapl_id; - loc_params1.obj_type = H5I_get_type(src_loc_id); + loc_params1.obj_type = src_id_type; /* Set location parameter for destination object */ loc_params2.type = H5VL_OBJECT_BY_NAME; loc_params2.loc_data.loc_by_name.name = dst_name; loc_params2.loc_data.loc_by_name.lapl_id = lapl_id; - loc_params2.obj_type = H5I_get_type(dst_loc_id); + loc_params2.obj_type = dst_id_type; - if (H5L_SAME_LOC != src_loc_id) - /* Get the location object */ - if (NULL == (vol_obj1 = H5VL_vol_object(src_loc_id))) - HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); - if (H5L_SAME_LOC != dst_loc_id) - /* Get the location object */ - if (NULL == (vol_obj2 = H5VL_vol_object(dst_loc_id))) - HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); + /* Get the location object */ + if (NULL == (vol_obj1 = H5VL_vol_object(src_loc_id))) + HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); + /* Get the location object */ + if (NULL == (vol_obj2 = H5VL_vol_object(dst_loc_id))) + HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Make sure that the VOL connectors are the same */ if (vol_obj1 && vol_obj2) { diff --git a/src/H5M.c b/src/H5M.c index b196f0930b8..bb8b4d9882b 100644 --- a/src/H5M.c +++ b/src/H5M.c @@ -359,8 +359,7 @@ H5Mcreate_async(const char *app_file, const char *app_func, unsigned app_line, h * the in-file datatype for values is defined by VAL_TYPE_ID. * LOC_ID specifies the file to create the map object, but no * link to the object is created. Other options can be - * specified through the property lists LCPL_ID, MCPL_ID, and - * MAPL_ID. + * specified through the property lists MCPL_ID and MAPL_ID. * * The resulting ID should be linked into the file with * H5Olink or it will be deleted when closed. 
@@ -397,7 +396,7 @@ H5Mcreate_anon(hid_t loc_id, hid_t key_type_id, hid_t val_type_id, hid_t mcpl_id HGOTO_ERROR(H5E_MAP, H5E_CANTSET, H5I_INVALID_HID, "can't set access property list info"); /* get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, H5I_INVALID_HID, "invalid location identifier"); /* Set location parameters */ @@ -693,7 +692,7 @@ H5Mget_key_type(hid_t map_id) FUNC_ENTER_API(H5I_INVALID_HID) /* Check args */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object_verify(map_id, H5I_MAP))) + if (NULL == (vol_obj = H5VL_vol_object_verify(map_id, H5I_MAP))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, H5I_INVALID_HID, "invalid map identifier"); /* Set up VOL callback arguments */ @@ -737,7 +736,7 @@ H5Mget_val_type(hid_t map_id) FUNC_ENTER_API(H5I_INVALID_HID) /* Check args */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object_verify(map_id, H5I_MAP))) + if (NULL == (vol_obj = H5VL_vol_object_verify(map_id, H5I_MAP))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, H5I_INVALID_HID, "invalid map identifier"); /* Set up VOL callback arguments */ @@ -781,7 +780,7 @@ H5Mget_create_plist(hid_t map_id) FUNC_ENTER_API(H5I_INVALID_HID) /* Check args */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object_verify(map_id, H5I_MAP))) + if (NULL == (vol_obj = H5VL_vol_object_verify(map_id, H5I_MAP))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, H5I_INVALID_HID, "invalid map identifier"); /* Set up VOL callback arguments */ @@ -828,7 +827,7 @@ H5Mget_access_plist(hid_t map_id) FUNC_ENTER_API(H5I_INVALID_HID) /* Check args */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object_verify(map_id, H5I_MAP))) + if (NULL == (vol_obj = H5VL_vol_object_verify(map_id, H5I_MAP))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, H5I_INVALID_HID, "invalid map identifier"); /* Set up VOL callback arguments */ @@ -871,7 +870,7 @@ H5Mget_count(hid_t map_id, hsize_t *count /*out*/, hid_t dxpl_id) 
FUNC_ENTER_API(H5I_INVALID_HID) /* Check args */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object_verify(map_id, H5I_MAP))) + if (NULL == (vol_obj = H5VL_vol_object_verify(map_id, H5I_MAP))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, H5I_INVALID_HID, "invalid map identifier"); /* Get the default dataset transfer property list if the user didn't provide one */ @@ -927,7 +926,7 @@ H5M__put_api_common(hid_t map_id, hid_t key_mem_type_id, const void *key, hid_t HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "invalid value memory datatype ID"); /* Get map pointer */ - if (NULL == (*vol_obj_ptr = (H5VL_object_t *)H5I_object_verify(map_id, H5I_MAP))) + if (NULL == (*vol_obj_ptr = H5VL_vol_object_verify(map_id, H5I_MAP))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "map_id is not a map ID"); /* Get the default dataset transfer property list if the user didn't provide one */ @@ -1056,7 +1055,7 @@ H5M__get_api_common(hid_t map_id, hid_t key_mem_type_id, const void *key, hid_t HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "invalid value memory datatype ID"); /* Get map pointer */ - if (NULL == (*vol_obj_ptr = (H5VL_object_t *)H5I_object_verify(map_id, H5I_MAP))) + if (NULL == (*vol_obj_ptr = H5VL_vol_object_verify(map_id, H5I_MAP))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "map_id is not a map ID"); /* Get the default dataset transfer property list if the user didn't provide one */ @@ -1187,7 +1186,7 @@ H5Mexists(hid_t map_id, hid_t key_mem_type_id, const void *key, hbool_t *exists, HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "invalid key memory datatype ID"); /* Get map pointer */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object_verify(map_id, H5I_MAP))) + if (NULL == (vol_obj = H5VL_vol_object_verify(map_id, H5I_MAP))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "map_id is not a map ID"); /* Get the default dataset transfer property list if the user didn't provide one */ @@ -1263,7 +1262,7 @@ H5Miterate(hid_t map_id, hsize_t *idx, hid_t key_mem_type_id, H5M_iterate_t op, 
HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no operator specified"); /* Get map pointer */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object_verify(map_id, H5I_MAP))) + if (NULL == (vol_obj = H5VL_vol_object_verify(map_id, H5I_MAP))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "map_id is not a map ID"); /* Get the default dataset transfer property list if the user didn't provide one */ @@ -1348,7 +1347,7 @@ H5Miterate_by_name(hid_t loc_id, const char *map_name, hsize_t *idx, hid_t key_m HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "no operator specified"); /* Get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Get the default dataset transfer property list if the user didn't provide one */ @@ -1412,7 +1411,7 @@ H5Mdelete(hid_t map_id, hid_t key_mem_type_id, const void *key, hid_t dxpl_id) HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, FAIL, "invalid key memory datatype ID"); /* Get map pointer */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object_verify(map_id, H5I_MAP))) + if (NULL == (vol_obj = H5VL_vol_object_verify(map_id, H5I_MAP))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "map_id is not a map ID"); /* Get the default dataset transfer property list if the user didn't provide one */ diff --git a/src/H5Mpublic.h b/src/H5Mpublic.h index 44c78fe064f..3d8a8c7f7c8 100644 --- a/src/H5Mpublic.h +++ b/src/H5Mpublic.h @@ -245,9 +245,27 @@ H5_DLL hid_t H5Mcreate_async(hid_t loc_id, const char *name, hid_t key_type_id, /** * \ingroup H5M * - * \brief + * \brief Creates a map object without linking it into a file * - * \details + * \fgdta_loc_id + * \type_id{key_type_id} + * \type_id{val_type_id} + * \mcpl_id + * \mapl_id + * \return \hid_t{map object} + * The resulting ID should be linked into the file with H5Olink or it + * will be deleted when closed. 
+ * + * \details H5Mcreate_anon() creates a new map object for storing key-value + * pairs. The in-file datatype for keys is defined by \p key_type_id + * and the in-file datatype for values is defined by \p val_type_id. \p + * loc_id specifies the file to create the map object, but no link to + * the object is created. Other options can be specified through the + * property lists \p mcpl_id and \p mapl_id. + * + * The new map should be linked into the group hierarchy before being + * closed or it will be deleted. The map should be closed when the + * caller no longer requires it. * * \since 1.12.0 * diff --git a/src/H5O.c b/src/H5O.c index 26340aa567a..39887b51a29 100644 --- a/src/H5O.c +++ b/src/H5O.c @@ -359,7 +359,7 @@ H5Oopen_by_token(hid_t loc_id, H5O_token_t token) HGOTO_ERROR(H5E_ARGS, H5E_BADVALUE, H5I_INVALID_HID, "can't open H5O_TOKEN_UNDEF"); /* Get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, H5I_INVALID_HID, "invalid location identifier"); /* Get object type */ @@ -436,7 +436,7 @@ H5O__copy_api_common(hid_t src_loc_id, const char *src_name, hid_t dst_loc_id, c HGOTO_ERROR(H5E_OHDR, H5E_CANTSET, FAIL, "can't set object access arguments"); /* get the object */ - if (NULL == (*vol_obj_ptr = (H5VL_object_t *)H5I_object(dst_loc_id))) + if (NULL == (*vol_obj_ptr = H5VL_vol_object(dst_loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); loc_params2.type = H5VL_OBJECT_BY_SELF; loc_params2.obj_type = H5I_get_type(dst_loc_id); diff --git a/src/H5Odeprec.c b/src/H5Odeprec.c index f74ec542d6f..37a3996c1e6 100644 --- a/src/H5Odeprec.c +++ b/src/H5Odeprec.c @@ -347,7 +347,7 @@ H5Oopen_by_addr(hid_t loc_id, haddr_t addr) FUNC_ENTER_API(H5I_INVALID_HID) /* Get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) 
HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, H5I_INVALID_HID, "invalid location identifier"); /* Get object type */ diff --git a/src/H5Odtype.c b/src/H5Odtype.c index 24671b02107..b2e6c8f65be 100644 --- a/src/H5Odtype.c +++ b/src/H5Odtype.c @@ -135,7 +135,7 @@ H5O__dtype_decode_helper(unsigned *ioflags /*in,out*/, const uint8_t **pp, H5T_t * that case is impossible. * * Instead of using our normal H5_IS_BUFFER_OVERFLOW macro, use - * H5_IS_KNOWN_BUFFER_OVERFLOW, which will skip the check when the + * H5_IS_KNOWN_BUFFER_OVERFLOW, which will skip the check when * we're decoding a buffer from H5Tconvert(). * * Even if this is fixed at some point in the future, as long as we diff --git a/src/H5PLmodule.h b/src/H5PLmodule.h index f034e7c6631..1aedc2783fe 100644 --- a/src/H5PLmodule.h +++ b/src/H5PLmodule.h @@ -276,10 +276,12 @@ * \endcode * * See the documentation at - * hdf5_plugins/docs folder. In + * hdf5_plugins/docs folder. In * particular: - * INSTALL_With_CMake - * USING_HDF5_AND_CMake + * INSTALL_With_CMake + * USING_HDF5_AND_CMake */ /** diff --git a/src/H5Pmodule.h b/src/H5Pmodule.h index ef300f9312a..8ac6f86eed9 100644 --- a/src/H5Pmodule.h +++ b/src/H5Pmodule.h @@ -979,7 +979,7 @@ *
    * \snippet{doc} tables/propertyLists.dox lcpl_table *
    - * @see STRCPL + * @see @ref STRCPL * * \defgroup ACPL Attribute Creation Properties * \ingroup STRCPL @@ -988,7 +988,7 @@ * \snippet{doc} tables/propertyLists.dox acpl_table * * - * @see STRCPL + * @see @ref STRCPL * * \defgroup LAPL Link Access Properties * \ingroup H5P diff --git a/src/H5Ppublic.h b/src/H5Ppublic.h index 06921ac0e32..e53d3f6e34d 100644 --- a/src/H5Ppublic.h +++ b/src/H5Ppublic.h @@ -5205,7 +5205,7 @@ H5_DLL herr_t H5Pset_mdc_config(hid_t plist_id, H5AC_cache_config_t *config_ptr) * current state of the logging flags. * * The log format is described in [Metadata Cache Logging] - * (https://\DSPURL/Fine-tuning+the+Metadata+Cache). + * (https://\DOCURL/advanced_topics/Fine-tuning+the+Metadata+Cache). * * \since 1.10.0 * diff --git a/src/H5Smodule.h b/src/H5Smodule.h index 2dc8fe127d6..b9897485405 100644 --- a/src/H5Smodule.h +++ b/src/H5Smodule.h @@ -53,7 +53,7 @@ * sub‐sampling, and scatter‐gather access to datasets. * * \subsection subsec_dataspace_function Dataspace Function Summaries - * @see H5S reference manual provides a reference list of dataspace functions, the H5S APIs. + * see \ref H5S reference manual provides a reference list of dataspace functions, the H5S APIs. * * \subsection subsec_dataspace_program Definition of Dataspace Objects and the Dataspace Programming Model * @@ -977,9 +977,9 @@ * \subsection subsec_dataspace_refer References * * Another use of selections is to store a reference to a region of a dataset in the file or an external file. - An HDF5 object reference + * An HDF5 object reference * object is a pointer to an object (attribute, dataset, group, or committed datatype) in the file or an - external file. A selection can + * external file. A selection can * be used to create a pointer to a set of selected elements of a dataset, called a region reference. The * selection can be either a point selection or a hyperslab selection. 
* @@ -990,13 +990,179 @@ * To discover the elements and/or read the data, the region reference can be dereferenced to obtain the * identifiers for the dataset and dataspace. * - * For more information, \see subsubsec_datatype_other_refs. + * For more information, \see \ref subsubsec_datatype_other_refs. * * \subsubsection subsubsec_dataspace_refer_use Example Uses for Region References + * Region references are used to implement stored pointers to data within a dataset. For example, features + * in a large dataset might be indexed by a table. See the figure below. This table could be stored as an + * HDF5 dataset with a compound datatype, for example, with a field for the name of the feature and a region + * reference to point to the feature in the dataset. See the second figure below. + * + * + * + * + * + *
    + * \image html Dspace_features.gif " Features indexed by a table" + *
    + * + * + * + * + * + *
    + * \image html Dspace_features_cmpd.gif "Storing the table with a compound datatype" + *
    * * \subsubsection subsubsec_dataspace_refer_create Creating References to Regions + * To create a region reference: + * \li 1. Create or open the dataset that contains the region + * \li 2. Get the dataspace for the dataset + * \li 3. Define a selection that specifies the region + * \li 4. Create a region reference using the dataset and dataspace with selection + * \li 5. Write the region reference(s) to the desired dataset or attribute + * \li 6. Release the region reference(s) + * + * The figure below shows a diagram of a file with three datasets. Dataset D1 and D2 are two dimensional + * arrays of integers. Dataset R1 is a one dimensional array of references to regions in D1 and D2. The + * regions can be any valid selection of the dataspace of the target dataset. + * + * + * + * + *
    + * \image html Dspace_three_datasets.gif "A file with three datasets" + *
    + * Note: In the figure above, R1 is a 1 D array of region pointers; each pointer refers to a selection + * in one dataset. + * + * The example below shows code to create the array of region references. The references are created in an + * array of type #H5R_ref_t. Each region is defined as a selection on the dataspace of the dataset, + * and a reference is created using \ref H5Rcreate_region(). The call to \ref H5Rcreate_region() specifies the + file, + * dataset, and the dataspace with selection. + * + * Create an array of region references + * \code + * // create an array of 4 region references + * H5R_ref_t ref[4]; + * + * // Create a reference to the first hyperslab in the first Dataset. + * offset[0] = 1; offset[1] = 1; + * count[0] = 3; count[1] = 2; + * status = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, offset, NULL, count, NULL); + * status = H5Rcreate_region(file_id, "D1", space_id, H5P_DEFAULT, &ref[0]); + * + * // The second reference is to a union of hyperslabs in the first Dataset + * offset[0] = 5; offset[1] = 3; + * count[0] = 1; count[1] = 4; + * status = H5Sselect_none(space_id); + * status = H5Sselect_hyperslab(space_id, H5S_SELECT_SET, offset, NULL, count, NULL); + * offset[0] = 6; offset[1] = 5; + * count[0] = 1; count[1] = 2; + * status = H5Sselect_hyperslab(space_id, H5S_SELECT_OR, offset, NULL, count, NULL); + * status = H5Rcreate_region(file_id, "D1", space_id, H5P_DEFAULT, &ref[1]); + * + * // the fourth reference is to a selection of points in the first Dataset + * status = H5Sselect_none(space_id); + * coord[0][0] = 4; coord[0][1] = 4; + * coord[1][0] = 2; coord[1][1] = 6; + * coord[2][0] = 3; coord[2][1] = 7; + * coord[3][0] = 1; coord[3][1] = 5; + * coord[4][0] = 5; coord[4][1] = 8; + * + * status = H5Sselect_elements(space_id, H5S_SELECT_SET, num_points, (const hssize_t **)coord); + * status = H5Rcreate_region(file_id, "D1", space_id, H5P_DEFAULT, &ref[3]); + * + * // the third reference is to a hyperslab in the second Dataset 
+ * offset[0] = 0; offset[1] = 0; + * count[0] = 4; count[1] = 6; + * status = H5Sselect_hyperslab(space_id2, H5S_SELECT_SET, offset, NULL, count, NULL); + * status = H5Rcreate_region(file_id, "D2", space_id2, H5P_DEFAULT, &ref[2]); + * \endcode + * + * When all the references are created, the array of references is written to the dataset R1. The + * dataset is declared to have datatype #H5T_STD_REF. See the example below. Also, note the release + * of the references afterwards. + * + * Write the array of references to a dataset + * \code + * Hsize_t dimsr[1]; + * dimsr[0] = 4; + * + * // Dataset with references. + * spacer_id = H5Screate_simple(1, dimsr, NULL); + * dsetr_id = H5Dcreate(file_id, "R1", H5T_STD_REF_DSETREG, spacer_id, H5P_DEFAULT, H5P_DEFAULT, + * H5P_DEFAULT); + * + * // Write dataset with the references. + * status = H5Dwrite(dsetr_id, H5T_STD_REF_DSETREG, H5S_ALL, H5S_ALL, H5P_DEFAULT, ref); + * + * status = H5Rdestroy(&ref[0]); + * status = H5Rdestroy(&ref[1]); + * status = H5Rdestroy(&ref[0]); + * status = H5Rdestroy(&ref[1]); + * \endcode + * + * When creating region references, the following rules are enforced. + * \li The selection must be a valid selection for the target dataset, just as when transferring data + * \li The dataset must exist in the file when the reference is created; #H5Rcreate_region + * \li The target dataset must be in the same file as the stored reference * * \subsubsection subsubsec_dataspace_refer_read Reading References to Regions + * To retrieve data from a region reference, the reference must be read from the file, and then the data can + * be retrieved. The steps are: + * \li 1. Open the dataset or attribute containing the reference objects + * \li 2. Read the reference object(s) + * \li 3. For each region reference, get the dataset (#H5Ropen_object) and dataspace (#H5Ropen_region) + * \li 4. 
Use the dataspace and datatype to discover what space is needed to store the data, allocate the + * correct storage and create a dataspace and datatype to define the memory data layout + * \li 5. Release the region reference(s) + * + * The example below shows code to read an array of region references from a dataset, and then read the + * data from the first selected region. Note that the region reference has information that records the + * dataset (within the file) and the selection on the dataspace of the dataset. After dereferencing the + * regions reference, the datatype, number of points, and some aspects of the selection can be discovered. + * (For a union of hyperslabs, it may not be possible to determine the exact set of hyperslabs that has been + * combined.) + * The table below the code example shows the inquiry functions. + * + * When reading data from a region reference, the following rules are enforced: + * \li The target dataset must be present and accessible in the file + * \li The selection must be a valid selection for the dataset + * + * Read an array of region references; read from the first selection + * \code + * dsetr_id = H5Dopen (file_id, "R1", H5P_DEFAULT); + * status = H5Dread(dsetr_id, H5T_STD, H5S_ALL, H5S_ALL, H5P_DEFAULT, ref_out); + * + * // Dereference the first reference. + * // 1) get the dataset (H5Ropen_object) + * // 2) get the selected dataspace (H5Ropen_region) + * + * dsetv_id = H5Ropen_object(&ref_out[0], H5P_DEFAULT, H5P_DEFAULT); + * space_id = H5Ropen_region(&ref_out[0], H5P_DEFAULT, H5P_DEFAULT); + * + * // Discover how many points and shape of the data + * ndims = H5Sget_simple_extent_ndims(space_id); + * H5Sget_simple_extent_dims(space_id,dimsx,NULL); + * + * // Read and display hyperslab selection from the dataset. 
+ * dimsy[0] = H5Sget_select_npoints(space_id); + * spacex_id = H5Screate_simple(1, dimsy, NULL); + * + * status = H5Dread(dsetv_id, H5T_NATIVE_INT, H5S_ALL, space_id, H5P_DEFAULT, data_out); + * printf("Selected hyperslab: "); + * for (i = 0; i < 8; i++) { + * printf("\n"); + * for (j = 0; j < 10; j++) + * printf("%d ", data_out[i][j]); + * } + * printf("\n"); + * + * status = H5Rdestroy(&ref_out[0]); + * \endcode + * * * \subsection subsec_dataspace_deprecated_refer Deprecated References to Dataset Regions * The API described in this section was deprecated since HDF5 1.12.0. Shown are @@ -1016,34 +1182,7 @@ * retrieved with a call to #H5Rget_region(). The selected dataspace can be used to read the selected data * elements. * - * For more information, \see subsubsec_datatype_other_refs. - * - * \subsubsection subsubsec_dataspace_deprecated_refer_use Deprecated Example Uses for Region References - * - * Region references are used to implement stored pointers to data within a dataset. For example, features - * in a large dataset might be indexed by a table. See the figure below. This table could be stored as an - * HDF5 dataset with a compound datatype, for example, with a field for the name of the feature and a region - * reference to point to the feature in the dataset. See the second figure below. - * - * - * - * - * - *
    - * \image html Dspace_features.gif " Features indexed by a table" - *
    - * - * - * - * - * - *
    - * \image html Dspace_features_cmpd.gif "Storing the table with a compound datatype" - *
    - * - * * \subsubsection subsubsec_dataspace_deprecated_refer_create Deprecated Creating References to Regions - * * To create a region reference: * \li 1. Create or open the dataset that contains the region * \li 2. Get the dataspace for the dataset @@ -1183,6 +1322,7 @@ * printf("\n"); * \endcode * + * \subsection subsec_dataspace_funcs Functions * * * @@ -1243,7 +1383,6 @@ * *
    The inquiry functions
    * - * * \subsection subsec_dataspace_sample Sample Programs * * This section contains the full programs from which several of the code examples in this chapter were diff --git a/src/H5Tcommit.c b/src/H5Tcommit.c index d64c4e82439..92853c63058 100644 --- a/src/H5Tcommit.c +++ b/src/H5Tcommit.c @@ -349,7 +349,7 @@ H5Tcommit_anon(hid_t loc_id, hid_t type_id, hid_t tcpl_id, hid_t tapl_id) loc_params.obj_type = H5I_get_type(loc_id); /* Get the file object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid file identifier"); /* Commit the datatype */ diff --git a/src/H5Tdeprec.c b/src/H5Tdeprec.c index cc998346cb4..3483597346e 100644 --- a/src/H5Tdeprec.c +++ b/src/H5Tdeprec.c @@ -116,7 +116,7 @@ H5Tcommit1(hid_t loc_id, const char *name, hid_t type_id) loc_params.obj_type = H5I_get_type(loc_id); /* get the object from the loc_id */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid object identifier"); /* Commit the datatype */ @@ -167,7 +167,7 @@ H5Topen1(hid_t loc_id, const char *name) loc_params.obj_type = H5I_get_type(loc_id); /* Get the location object */ - if (NULL == (vol_obj = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, H5I_INVALID_HID, "invalid location identifier"); /* Open the datatype */ diff --git a/src/H5Tmodule.h b/src/H5Tmodule.h index 636679e8380..3e121469108 100644 --- a/src/H5Tmodule.h +++ b/src/H5Tmodule.h @@ -304,7 +304,7 @@ * * * - * @see H5R + * @see @ref H5R * * * @@ -971,7 +971,7 @@ * translated to and from standard types of the same class, as described above. * * \subsection subsec_datatype_function Datatype Function Summaries - * @see H5T reference manual provides a reference list of datatype functions, the H5T APIs. 
+ * see \ref H5T reference manual provides a reference list of datatype functions, the H5T APIs. * * \subsection subsec_datatype_program Programming Model for Datatypes * The HDF5 Library implements an object-oriented model of datatypes. HDF5 datatypes are @@ -2164,6 +2164,7 @@ filled according to the value of this property. The padding can be: * \endcode * * The example below shows the content of the file written on a little-endian machine. + * * Create and write a little-endian dataset with a compound datatype in C * \code * HDF5 “SDScompound.h5” { @@ -2248,6 +2249,7 @@ filled according to the value of this property. The padding can be: * * The figure below shows the content of the file written on a little-endian machine. Only float and * double fields are written. The default fill value is used to initialize the unwritten integer field. + * * Writing floats and doubles to a dataset on a little-endian system * \code * HDF5 “SDScompound.h5” { @@ -2285,6 +2287,7 @@ filled according to the value of this property. The padding can be: * compound datatype. As this example illustrates, writing and reading compound datatypes in * Fortran is always done by fields. The content of the written file is the same as shown in the * example above. + * * Create and write a dataset with a compound datatype in Fortran * \code * ! One cannot write an array of a derived datatype in @@ -2921,6 +2924,7 @@ filled according to the value of this property. The padding can be: * declaration of a datatype of type #H5T_C_S1 which is set to #H5T_VARIABLE. The HDF5 * Library automatically translates between this and the vl_t structure. Note: the #H5T_VARIABLE * size can only be used with string datatypes. + * * Set the string datatype size to H5T_VARIABLE * \code * tid1 = H5Tcopy (H5T_C_S1); @@ -2929,6 +2933,7 @@ filled according to the value of this property. 
The padding can be: * * Variable-length strings can be read into C strings (in other words, pointers to zero terminated * arrays of char). See the example below. + * * Read variable-length strings into C strings * \code * char *rdata[SPACE1_DIM1]; @@ -3053,6 +3058,7 @@ filled according to the value of this property. The padding can be: * would be as an array of integers. The example below shows an example of how to create an * enumeration with five elements. The elements map symbolic names to 2-byte integers. See the * table below. + * * Create an enumeration with five elements * \code * hid_t hdf_en_colors; @@ -3582,6 +3588,7 @@ filled according to the value of this property. The padding can be: * * To create two or more datasets that share a common datatype, first commit the datatype, and then * use that datatype to create the datasets. See the example below. + * * Create a shareable datatype * \code * hid_t t1 = ...some transient type...; @@ -3697,6 +3704,7 @@ filled according to the value of this property. The padding can be: * memory. The destination datatype must be specified in the #H5Dread call. The example below * shows an example of reading a dataset of 32-bit integers. The figure below the example shows * the data transformation that is performed. + * * Specify the destination datatype with H5Dread * \code * // Stored as H5T_STD_BE32 @@ -3797,6 +3805,7 @@ filled according to the value of this property. The padding can be: * The currently supported text format used by #H5LTtext_to_dtype and #H5LTdtype_to_text is the * data description language (DDL) and conforms to the \ref DDLBNF114. The portion of the * \ref DDLBNF114 that defines HDF5 datatypes appears below. 
+ * * The definition of HDF5 datatypes from the HDF5 DDL * \code * ::= | | | diff --git a/src/H5VLcallback.c b/src/H5VLcallback.c index 58e839c9985..0e696088ebf 100644 --- a/src/H5VLcallback.c +++ b/src/H5VLcallback.c @@ -5100,6 +5100,10 @@ H5VL_link_move(const H5VL_object_t *src_vol_obj, const H5VL_loc_params_t *loc_pa FUNC_ENTER_NOAPI(FAIL) + /* Sanity check */ + assert(src_vol_obj); + assert(src_vol_obj->data); + /* Set wrapper info in API context */ vol_obj = (src_vol_obj->data ? src_vol_obj : dst_vol_obj); if (H5VL_set_vol_wrapper(vol_obj) < 0) diff --git a/src/H5VLnative.c b/src/H5VLnative.c index ceee7f16c43..6f6b2d0768d 100644 --- a/src/H5VLnative.c +++ b/src/H5VLnative.c @@ -393,7 +393,7 @@ H5VLnative_addr_to_token(hid_t loc_id, haddr_t addr, H5O_token_t *token) bool is_native_vol_obj; /* Get the location object */ - if (NULL == (vol_obj_container = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj_container = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Make sure that the VOL object is a native connector object */ @@ -486,7 +486,7 @@ H5VLnative_token_to_addr(hid_t loc_id, H5O_token_t token, haddr_t *addr) bool is_native_vol_obj; /* Get the location object */ - if (NULL == (vol_obj_container = (H5VL_object_t *)H5I_object(loc_id))) + if (NULL == (vol_obj_container = H5VL_vol_object(loc_id))) HGOTO_ERROR(H5E_ARGS, H5E_BADTYPE, FAIL, "invalid location identifier"); /* Make sure that the VOL object is a native connector object */ diff --git a/src/H5module.h b/src/H5module.h index a7aa05a0644..083f40005c7 100644 --- a/src/H5module.h +++ b/src/H5module.h @@ -28,6 +28,7 @@ /** \page H5DM_UG HDF5 Data Model and File Structure * * \section sec_data_model The HDF5 Data Model and File Structure + * * \subsection subsec_data_model_intro Introduction * The Hierarchical Data Format (HDF) implements a model for managing and storing data. 
The * model includes an abstract data model and an abstract storage model (the data format), and @@ -100,8 +101,11 @@ * model, and stored in a storage medium. The stored objects include header blocks, free lists, data * blocks, B-trees, and other objects. Each group or dataset is stored as one or more header and data * blocks. - * @see HDF5 File Format Specification - * for more information on how these objects are organized. The HDF5 library can also use other + * + * For more information on how these objects are organized; + * see HDF5 File Format Specification + * + * The HDF5 library can also use other * libraries and modules such as compression. * * diff --git a/src/H5private.h b/src/H5private.h index 168117c15a1..5a201cbfd97 100644 --- a/src/H5private.h +++ b/src/H5private.h @@ -1154,34 +1154,34 @@ H5_DLL herr_t H5_trace_args(struct H5RS_str_t *rs, const char *type, va_list ap) * Handles H5XY_. */ #define H5_IS_API(S) \ - ('_' != ((const char *)S)[2] /* underscore at position 2 */ \ - && '_' != ((const char *)S)[3] /* underscore at position 3 */ \ - && !( /* NOT */ \ - ((const char *)S)[4] /* pos 4 exists */ \ - && (isupper(S[3]) || isdigit(S[3])) /* pos 3 dig | uc */ \ - && '_' == ((const char *)S)[4] /* pos 4 underscore */ \ + ('_' != ((const char *)S)[2] /* underscore at position 2 */ \ + && '_' != ((const char *)S)[3] /* underscore at position 3 */ \ + && !( /* NOT */ \ + ((const char *)S)[4] /* pos 4 exists */ \ + && (isupper((int)S[3]) || isdigit((int)S[3])) /* pos 3 dig | uc */ \ + && '_' == ((const char *)S)[4] /* pos 4 underscore */ \ )) /* `S' is the name of a function which is being tested to check if it's */ /* a public API function */ #define H5_IS_PUB(S) \ - (((isdigit(S[1]) || isupper(S[1])) && islower(S[2])) || \ - ((isdigit(S[2]) || isupper(S[2])) && islower(S[3])) || \ - (!S[4] || ((isdigit(S[3]) || isupper(S[3])) && islower(S[4])))) + (((isdigit((int)S[1]) || isupper((int)S[1])) && islower((int)S[2])) || \ + ((isdigit((int)S[2]) || 
isupper((int)S[2])) && islower((int)S[3])) || \ + (!S[4] || ((isdigit((int)S[3]) || isupper((int)S[3])) && islower((int)S[4])))) /* `S' is the name of a function which is being tested to check if it's */ /* a private library function */ #define H5_IS_PRIV(S) \ - (((isdigit(S[1]) || isupper(S[1])) && '_' == S[2] && islower(S[3])) || \ - ((isdigit(S[2]) || isupper(S[2])) && '_' == S[3] && islower(S[4])) || \ - ((isdigit(S[3]) || isupper(S[3])) && '_' == S[4] && islower(S[5]))) + (((isdigit((int)S[1]) || isupper((int)S[1])) && '_' == S[2] && islower((int)S[3])) || \ + ((isdigit((int)S[2]) || isupper((int)S[2])) && '_' == S[3] && islower((int)S[4])) || \ + ((isdigit((int)S[3]) || isupper((int)S[3])) && '_' == S[4] && islower((int)S[5]))) /* `S' is the name of a function which is being tested to check if it's */ /* a package private function */ #define H5_IS_PKG(S) \ - (((isdigit(S[1]) || isupper(S[1])) && '_' == S[2] && '_' == S[3] && islower(S[4])) || \ - ((isdigit(S[2]) || isupper(S[2])) && '_' == S[3] && '_' == S[4] && islower(S[5])) || \ - ((isdigit(S[3]) || isupper(S[3])) && '_' == S[4] && '_' == S[5] && islower(S[6]))) + (((isdigit((int)S[1]) || isupper((int)S[1])) && '_' == S[2] && '_' == S[3] && islower((int)S[4])) || \ + ((isdigit((int)S[2]) || isupper((int)S[2])) && '_' == S[3] && '_' == S[4] && islower((int)S[5])) || \ + ((isdigit((int)S[3]) || isupper((int)S[3])) && '_' == S[4] && '_' == S[5] && islower((int)S[6]))) /* global library version information string */ extern char H5_lib_vers_info_g[]; diff --git a/test/links.c b/test/links.c index 6612f56e363..ad9948af04d 100644 --- a/test/links.c +++ b/test/links.c @@ -1938,12 +1938,14 @@ test_move_preserves(hid_t fapl_id, bool new_format) *------------------------------------------------------------------------- */ #ifndef H5_NO_DEPRECATED_SYMBOLS +#define NUM_OBJS 3 /* number of groups in FILENAME[0] file */ static int test_deprec(hid_t fapl, bool new_format) { hid_t file_id = H5I_INVALID_HID; hid_t group1_id 
= H5I_INVALID_HID; hid_t group2_id = H5I_INVALID_HID; + hid_t group3_id = H5I_INVALID_HID; H5G_stat_t sb_hard1, sb_hard2, sb_soft1, sb_soft2; H5G_obj_t obj_type; /* Object type */ hsize_t num_objs; /* Number of objects in a group */ @@ -1967,6 +1969,8 @@ test_deprec(hid_t fapl, bool new_format) FAIL_STACK_ERROR; if ((group2_id = H5Gcreate2(file_id, "group2", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) FAIL_STACK_ERROR; + if ((group3_id = H5Gcreate2(file_id, "group3", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) + FAIL_STACK_ERROR; /* Test H5Gset and get comment */ @@ -2022,7 +2026,7 @@ test_deprec(hid_t fapl, bool new_format) /* Test getting the number of objects in a group */ if (H5Gget_num_objs(file_id, &num_objs) < 0) FAIL_STACK_ERROR; - if (num_objs != 2) + if (num_objs != NUM_OBJS) TEST_ERROR; if (H5Gget_num_objs(group1_id, &num_objs) < 0) FAIL_STACK_ERROR; @@ -2113,9 +2117,43 @@ test_deprec(hid_t fapl, bool new_format) /* Test H5Gmove and H5Gmove2 */ if (H5Gmove(file_id, "group1", "moved_group1") < 0) - FAIL_STACK_ERROR; + TEST_ERROR; if (H5Gmove2(file_id, "group2", group1_id, "moved_group2") < 0) - FAIL_STACK_ERROR; + TEST_ERROR; + if (H5Gmove2(file_id, "group3", H5L_SAME_LOC, "moved_group3") < 0) + TEST_ERROR; + if (H5Gmove2(file_id, "moved_group3", group2_id, "moved_group3_to_group2") < 0) + TEST_ERROR; + + /* Test H5Gmove2 with H5L_SAME_LOC */ + if (H5Gmove2(group2_id, "moved_group3_to_group2", H5L_SAME_LOC, "group3_same_loc") < 0) + TEST_ERROR; + + /* Test H5Gmove2 with H5L_SAME_LOC */ + if (H5Gmove2(H5L_SAME_LOC, "moved_group1/moved_group2", file_id, "moved_group2_again") < 0) + TEST_ERROR; + + /* Put back moved_group2 for subsequent tests */ + if (H5Gmove2(file_id, "moved_group2_again", file_id, "moved_group1/moved_group2") < 0) + TEST_ERROR; + + /* Test passing in invalid ID */ + H5E_BEGIN_TRY + { + hid_t bad_id = H5I_BADID; + if (H5Gmove2(bad_id, "group2", group1_id, "moved_group2") >= 0) + TEST_ERROR; + } + H5E_END_TRY + + /* Test passing in 
invalid ID */ + H5E_BEGIN_TRY + { + hid_t bad_id = H5I_BADID; + if (H5Gmove2(file_id, "group2", bad_id, "moved_group2") >= 0) + TEST_ERROR; + } + H5E_END_TRY /* Ensure that both groups can be opened */ if (H5Gclose(group2_id) < 0) @@ -2129,6 +2167,8 @@ test_deprec(hid_t fapl, bool new_format) FAIL_STACK_ERROR; /* Close open IDs */ + if (H5Gclose(group3_id) < 0) + FAIL_STACK_ERROR; if (H5Gclose(group2_id) < 0) FAIL_STACK_ERROR; if (H5Gclose(group1_id) < 0) @@ -2154,6 +2194,7 @@ test_deprec(hid_t fapl, bool new_format) error: H5E_BEGIN_TRY { + H5Gclose(group3_id); H5Gclose(group2_id); H5Gclose(group1_id); H5Fclose(file_id); @@ -3293,7 +3334,8 @@ external_link_closing_deprec(hid_t fapl, bool new_format) /* Test copy (as of this test, it uses the same code as move) */ if (H5Lcopy(fid1, "elink/elink/elink", fid1, "elink/elink/elink_copied", H5P_DEFAULT, H5P_DEFAULT) < 0) FAIL_STACK_ERROR; - if (H5Lcopy(fid1, "elink/elink/elink", fid1, "elink/elink/elink/elink_copied2", H5P_DEFAULT, + /* Also exercise H5L_SAME_LOC */ + if (H5Lcopy(H5L_SAME_LOC, "elink/elink/elink", fid1, "elink/elink/elink/elink_copied2", H5P_DEFAULT, H5P_DEFAULT) < 0) FAIL_STACK_ERROR; @@ -4325,7 +4367,8 @@ lapl_nlinks_deprec(hid_t fapl, bool new_format) */ if (H5Lcopy(fid, "soft17", fid, "soft17/newer_soft", H5P_DEFAULT, plist) < 0) TEST_ERROR; - if (H5Lmove(fid, "soft17/newer_soft", fid, "soft17/newest_soft", H5P_DEFAULT, plist) < 0) + /* Also exercise H5L_SAME_LOC */ + if (H5Lmove(fid, "soft17/newer_soft", H5L_SAME_LOC, "soft17/newest_soft", H5P_DEFAULT, plist) < 0) TEST_ERROR; /* H5Olink */ diff --git a/test/mirror_vfd.c b/test/mirror_vfd.c index 124fc6a6b0c..fe6695b9a06 100644 --- a/test/mirror_vfd.c +++ b/test/mirror_vfd.c @@ -1236,6 +1236,7 @@ create_mirroring_split_fapl(const char *basename, struct mirrortest_filenames *n mirror_conf.handshake_port = opts->portno; if (strncpy(mirror_conf.remote_ip, opts->ip, H5FD_MIRROR_MAX_IP_LEN) == NULL) TEST_ERROR; + 
mirror_conf.remote_ip[H5FD_MIRROR_MAX_IP_LEN] = '\0'; if ((splitter_config->wo_fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) TEST_ERROR; if (H5Pset_fapl_mirror(splitter_config->wo_fapl_id, &mirror_conf) < 0) @@ -1248,8 +1249,10 @@ create_mirroring_split_fapl(const char *basename, struct mirrortest_filenames *n /* Set file paths for w/o and logfile */ if (strncpy(splitter_config->wo_path, (const char *)names->wo, H5FD_SPLITTER_PATH_MAX) == NULL) TEST_ERROR; + splitter_config->wo_path[H5FD_SPLITTER_PATH_MAX] = '\0'; if (strncpy(splitter_config->log_file_path, (const char *)names->log, H5FD_SPLITTER_PATH_MAX) == NULL) TEST_ERROR; + splitter_config->log_file_path[H5FD_SPLITTER_PATH_MAX] = '\0'; /* Create Splitter FAPL */ if ((ret_value = H5Pcreate(H5P_FILE_ACCESS)) < 0) diff --git a/test/vol.c b/test/vol.c index 041e0c9b88b..9fa4f06c1ca 100644 --- a/test/vol.c +++ b/test/vol.c @@ -914,11 +914,9 @@ test_basic_file_operation(const char *driver_name) TEST_ERROR; } - /* H5Fcreate */ if ((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id)) < 0) TEST_ERROR; - /* H5Fget_obj_count */ if ((obj_count = H5Fget_obj_count(fid, H5F_OBJ_FILE)) < 0) TEST_ERROR; if ((obj_count = H5Fget_obj_count(fid, H5F_OBJ_ALL)) < 0) @@ -926,7 +924,6 @@ test_basic_file_operation(const char *driver_name) if ((obj_count = H5Fget_obj_count((hid_t)H5F_OBJ_ALL, H5F_OBJ_DATASET)) < 0) TEST_ERROR; - /* H5Fget_obj_ids */ if ((obj_count = H5Fget_obj_ids(fid, H5F_OBJ_ALL, 2, obj_id_list)) < 0) TEST_ERROR; if ((obj_count = H5Fget_obj_ids((hid_t)H5F_OBJ_ALL, H5F_OBJ_DATASET, 2, obj_id_list)) < 0) @@ -937,7 +934,6 @@ test_basic_file_operation(const char *driver_name) strcmp(driver_name, "family") != 0 && strcmp(driver_name, "direct") != 0 && strcmp(driver_name, "core") != 0 && strcmp(driver_name, "core_paged") != 0 && strcmp(driver_name, "mpio") != 0 && strcmp(driver_name, "splitter") != 0)) { - /* H5Fget_access_plist */ if ((fapl_id2 = H5Fget_access_plist(fid)) < 0) TEST_ERROR; if (H5Pequal(fapl_id, 
fapl_id2) != true) @@ -946,53 +942,42 @@ test_basic_file_operation(const char *driver_name) TEST_ERROR; } /* end if */ - /* H5Fget_create_plist */ if ((fcpl_id = H5Fget_create_plist(fid)) < 0) TEST_ERROR; if (H5Pclose(fcpl_id) < 0) TEST_ERROR; - /* H5Fget_filesize */ if (H5Fget_filesize(fid, &file_size) < 0) TEST_ERROR; /* Can't retrieve VFD handle for split / multi / family VFDs */ if ((bool)(strcmp(driver_name, "split") != 0 && strcmp(driver_name, "multi") != 0 && strcmp(driver_name, "family") != 0)) { - /* H5Fget_vfd_handle */ if (H5Fget_vfd_handle(fid, H5P_DEFAULT, &os_file_handle) < 0) TEST_ERROR; } /* end if */ - /* H5Fget_intent */ if (H5Fget_intent(fid, &intent) < 0) TEST_ERROR; - /* H5Fget_info2 */ if (H5Fget_info2(fid, &finfo) < 0) TEST_ERROR; - /* H5Fget_name */ if (H5Fget_name(fid, name, 32) < 0) TEST_ERROR; - /* H5Fclear_elink_file_cache */ if (H5Fclear_elink_file_cache(fid) < 0) TEST_ERROR; - /* H5Fflush */ if (H5Fflush(fid, H5F_SCOPE_GLOBAL) < 0) TEST_ERROR; - /* H5Fclose */ if (H5Fclose(fid) < 0) TEST_ERROR; - /* H5Fis_accessible */ if (H5Fis_accessible(filename, fapl_id) < 0) TEST_ERROR; - /* H5Fopen */ if ((fid = H5Fopen(filename, H5F_ACC_RDWR, fapl_id)) < 0) TEST_ERROR; @@ -1001,7 +986,6 @@ test_basic_file_operation(const char *driver_name) strcmp(driver_name, "family") != 0 && strcmp(driver_name, "direct") != 0 && strcmp(driver_name, "core") != 0 && strcmp(driver_name, "core_paged") != 0 && strcmp(driver_name, "mpio") != 0 && strcmp(driver_name, "splitter") != 0)) { - /* H5Fget_access_plist */ if ((fapl_id2 = H5Fget_access_plist(fid)) < 0) TEST_ERROR; if (H5Pequal(fapl_id, fapl_id2) != true) @@ -1018,7 +1002,6 @@ test_basic_file_operation(const char *driver_name) strcmp(driver_name, "family") != 0 && strcmp(driver_name, "direct") != 0 && strcmp(driver_name, "core") != 0 && strcmp(driver_name, "core_paged") != 0 && strcmp(driver_name, "mpio") != 0 && strcmp(driver_name, "splitter") != 0)) { - /* H5Fget_access_plist */ if ((fapl_id2 = 
H5Fget_access_plist(fid_reopen)) < 0) TEST_ERROR; if (H5Pequal(fapl_id, fapl_id2) != true) @@ -1034,7 +1017,6 @@ test_basic_file_operation(const char *driver_name) h5_delete_test_file(FILENAME[0], fapl_id); - /* H5Pclose */ if (H5Pclose(fapl_id) < 0) TEST_ERROR; @@ -1086,27 +1068,22 @@ test_basic_group_operation(void) if ((fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, fapl_id)) < 0) TEST_ERROR; - /* H5Gcreate */ if ((gid = H5Gcreate2(fid, NATIVE_VOL_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR; - /* H5Gget_create_plist */ if ((gcpl_id = H5Gget_create_plist(gid)) < 0) TEST_ERROR; if (H5Pclose(gcpl_id) < 0) TEST_ERROR; - /* H5Gget_info */ if (H5Gget_info(gid, &info) < 0) TEST_ERROR; if (H5Gget_info(fid, &info) < 0) TEST_ERROR; - /* H5Gget_info_by_name */ if (H5Gget_info_by_name(fid, NATIVE_VOL_TEST_GROUP_NAME, &info, H5P_DEFAULT) < 0) TEST_ERROR; - /* H5Gget_info_by_idx */ if (H5Gget_info_by_idx(fid, "/", H5_INDEX_NAME, H5_ITER_NATIVE, 0, &info, H5P_DEFAULT) < 0) TEST_ERROR; @@ -1117,19 +1094,15 @@ test_basic_group_operation(void) if (H5Gflush(gid) < 0) TEST_ERROR; - /* H5Gclose */ if (H5Gclose(gid) < 0) TEST_ERROR; - /* H5Gopen */ if ((gid = H5Gopen2(fid, NATIVE_VOL_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) TEST_ERROR; - /* H5Gcreate_anon */ if ((gid_a = H5Gcreate_anon(fid, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR; - /* H5Grefresh */ if (H5Grefresh(gid) < 0) TEST_ERROR; @@ -1142,7 +1115,6 @@ test_basic_group_operation(void) h5_delete_test_file(FILENAME[0], fapl_id); - /* H5Pclose */ if (H5Pclose(fapl_id) < 0) TEST_ERROR; @@ -1213,7 +1185,6 @@ test_basic_dataset_operation(void) out_buf[i] = 0; } - /* H5Dcreate */ curr_dims = 0; if ((sid = H5Screate_simple(1, &curr_dims, &max_dims)) < 0) TEST_ERROR; @@ -1226,7 +1197,6 @@ test_basic_dataset_operation(void) H5P_DEFAULT)) < 0) TEST_ERROR; - /* H5Dcreate_anon */ if ((did_a = H5Dcreate_anon(fid, H5T_NATIVE_INT, sid, dcpl_id, H5P_DEFAULT)) < 0) TEST_ERROR; @@ -1235,7 +1205,6 @@ 
test_basic_dataset_operation(void) if (H5Pclose(dcpl_id) < 0) TEST_ERROR; - /* H5Dset_extent */ curr_dims = N_ELEMENTS; if (H5Dset_extent(did, &curr_dims) < 0) TEST_ERROR; @@ -1247,35 +1216,28 @@ test_basic_dataset_operation(void) if (H5Dflush(did) < 0) TEST_ERROR; - /* H5Dwrite */ if (H5Dwrite(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, in_buf) < 0) TEST_ERROR; - /* H5Drefresh */ if (H5Drefresh(did) < 0) TEST_ERROR; - /* H5Dclose */ if (H5Dclose(did) < 0) TEST_ERROR; if (H5Dclose(did_a) < 0) TEST_ERROR; - /* H5Dopen */ if ((did = H5Dopen2(fid, NATIVE_VOL_TEST_DATASET_NAME, H5P_DEFAULT)) < 0) TEST_ERROR; - /* H5Dget_space */ if ((sid = H5Dget_space(did)) < 0) TEST_ERROR; if (H5Sclose(sid) < 0) TEST_ERROR; - /* H5Dget_space_status */ if (H5Dget_space_status(did, &status) < 0) TEST_ERROR; - /* H5Dget_type */ if ((tid = H5Dget_type(did)) < 0) TEST_ERROR; if (H5Tclose(tid) < 0) @@ -1287,13 +1249,11 @@ test_basic_dataset_operation(void) if (H5Tclose(tid) < 0) TEST_ERROR; - /* H5Dget_create_plist */ if ((dcpl_id = H5Dget_create_plist(did)) < 0) TEST_ERROR; if (H5Pclose(dcpl_id) < 0) TEST_ERROR; - /* H5Dget_access_plist */ if ((dapl_id = H5Dget_access_plist(did)) < 0) TEST_ERROR; if (H5Pclose(dapl_id) < 0) @@ -1311,7 +1271,6 @@ test_basic_dataset_operation(void) if (HADDR_UNDEF != (offset = H5Dget_offset(did))) TEST_ERROR; - /* H5Dread */ if (H5Dread(did, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, out_buf) < 0) TEST_ERROR; @@ -1326,7 +1285,6 @@ test_basic_dataset_operation(void) h5_delete_test_file(FILENAME[0], fapl_id); - /* H5Pclose */ if (H5Pclose(fapl_id) < 0) TEST_ERROR; @@ -1391,44 +1349,35 @@ test_basic_attribute_operation(void) if ((sid = H5Screate_simple(1, &dims, &dims)) < 0) TEST_ERROR; - /* H5Acreate */ if ((aid = H5Acreate2(fid, NATIVE_VOL_TEST_ATTRIBUTE_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR; - /* H5Awrite */ if (H5Awrite(aid, H5T_NATIVE_INT, &data_in) < 0) TEST_ERROR; - /* H5Aread */ if (H5Aread(aid, 
H5T_NATIVE_INT, &data_out) < 0) TEST_ERROR; if (data_in != data_out) TEST_ERROR; - /* H5Aclose */ if (H5Aclose(aid) < 0) TEST_ERROR; - /* H5Aopen */ if ((aid = H5Aopen(fid, NATIVE_VOL_TEST_ATTRIBUTE_NAME, H5P_DEFAULT)) < 0) TEST_ERROR; if (H5Aclose(aid) < 0) TEST_ERROR; - /* H5Adelete */ if (H5Adelete(fid, NATIVE_VOL_TEST_ATTRIBUTE_NAME) < 0) TEST_ERROR; - /* H5Acreate_by_name */ if ((aid_name = H5Acreate_by_name(fid, NATIVE_VOL_TEST_GROUP_NAME, NATIVE_VOL_TEST_ATTRIBUTE_NAME, H5T_NATIVE_INT, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR; - /* H5Aclose */ if (H5Aclose(aid_name) < 0) TEST_ERROR; - /* H5Adelete_by_name */ if (H5Adelete_by_name(fid, NATIVE_VOL_TEST_GROUP_NAME, NATIVE_VOL_TEST_ATTRIBUTE_NAME, H5P_DEFAULT) < 0) TEST_ERROR; @@ -1441,7 +1390,6 @@ test_basic_attribute_operation(void) h5_delete_test_file(FILENAME[0], fapl_id); - /* H5Pclose */ if (H5Pclose(fapl_id) < 0) TEST_ERROR; @@ -1495,23 +1443,19 @@ test_basic_object_operation(void) if ((gid = H5Gcreate2(fid, NATIVE_VOL_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR; - /* H5Oget_info */ if (H5Oget_info3(fid, &object_info, H5O_INFO_ALL) < 0) TEST_ERROR; //! [H5Oget_info_by_name3_snip] - /* H5Oget_info_by_name */ if (H5Oget_info_by_name3(fid, NATIVE_VOL_TEST_GROUP_NAME, &object_info, H5O_INFO_ALL, H5P_DEFAULT) < 0) TEST_ERROR; //! 
[H5Oget_info_by_name3_snip] - /* H5Oexists_by_name */ if (H5Oexists_by_name(fid, NATIVE_VOL_TEST_GROUP_NAME, H5P_DEFAULT) != true) TEST_ERROR; - /* H5Oopen/close */ if ((oid = H5Oopen(fid, NATIVE_VOL_TEST_GROUP_NAME, H5P_DEFAULT)) < 0) TEST_ERROR; if (H5Oclose(oid) < 0) @@ -1524,7 +1468,6 @@ test_basic_object_operation(void) h5_delete_test_file(FILENAME[0], fapl_id); - /* H5Pclose */ if (H5Pclose(fapl_id) < 0) TEST_ERROR; @@ -1572,7 +1515,6 @@ test_basic_link_operation(void) if ((gid = H5Gcreate2(fid, NATIVE_VOL_TEST_GROUP_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0) TEST_ERROR; - /* H5Lcreate_hard */ if (H5Lcreate_hard(fid, "/", gid, NATIVE_VOL_TEST_HARD_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) TEST_ERROR; @@ -1580,18 +1522,15 @@ test_basic_link_operation(void) if (H5Lcreate_soft("/", fid, NATIVE_VOL_TEST_SOFT_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) TEST_ERROR; - /* H5Lexists */ if (H5Lexists(gid, NATIVE_VOL_TEST_HARD_LINK_NAME, H5P_DEFAULT) < 0) TEST_ERROR; if (H5Lexists(fid, NATIVE_VOL_TEST_SOFT_LINK_NAME, H5P_DEFAULT) < 0) TEST_ERROR; - /* H5Lcopy */ if (H5Lcopy(gid, NATIVE_VOL_TEST_HARD_LINK_NAME, fid, NATIVE_VOL_TEST_COPY_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) TEST_ERROR; - /* H5Lmove */ if (H5Lmove(fid, NATIVE_VOL_TEST_COPY_LINK_NAME, gid, NATIVE_VOL_TEST_MOVE_LINK_NAME, H5P_DEFAULT, H5P_DEFAULT) < 0) TEST_ERROR; @@ -1603,7 +1542,6 @@ test_basic_link_operation(void) h5_delete_test_file(FILENAME[0], fapl_id); - /* H5Pclose */ if (H5Pclose(fapl_id) < 0) TEST_ERROR; @@ -1654,7 +1592,6 @@ test_basic_datatype_operation(void) if ((tid = H5Tcopy(H5T_NATIVE_INT)) < 0) TEST_ERROR; - /* H5Tcommit */ if (H5Tcommit2(fid, NATIVE_VOL_TEST_DATATYPE_NAME, tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) < 0) TEST_ERROR; @@ -1665,23 +1602,18 @@ test_basic_datatype_operation(void) if (H5Tflush(tid) < 0) TEST_ERROR; - /* H5Trefresh */ if (H5Trefresh(tid) < 0) TEST_ERROR; - /* H5Tclose */ if (H5Tclose(tid) < 0) TEST_ERROR; - /* H5Topen */ if ((tid = H5Topen2(fid, 
NATIVE_VOL_TEST_DATATYPE_NAME, H5P_DEFAULT)) < 0) TEST_ERROR; - /* H5Tget_create_plist */ if ((tcpl_id = H5Tget_create_plist(tid)) < 0) TEST_ERROR; - /* H5Tcommit_anon */ if ((tid_anon = H5Tcopy(H5T_NATIVE_INT)) < 0) TEST_ERROR; if (H5Tcommit_anon(fid, tid_anon, H5P_DEFAULT, H5P_DEFAULT) < 0) @@ -1698,7 +1630,6 @@ test_basic_datatype_operation(void) h5_delete_test_file(FILENAME[0], fapl_id); - /* H5Pclose */ if (H5Pclose(fapl_id) < 0) TEST_ERROR; diff --git a/tools/src/h5dump/h5dump.c b/tools/src/h5dump/h5dump.c index dc86e526294..bb916d9fb1f 100644 --- a/tools/src/h5dump/h5dump.c +++ b/tools/src/h5dump/h5dump.c @@ -336,7 +336,7 @@ usage(const char *prog) PRINTVALSTREAM( rawoutstream, " " - "https://portal.hdfgroup.org/documentation/hdf5-docs/registered_virtual_file_drivers_vfds.html.\n"); + "https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html.\n"); PRINTVALSTREAM(rawoutstream, " Without the file driver flag, the file will be opened with each driver in\n"); PRINTVALSTREAM(rawoutstream, " turn and in the order specified above until one driver succeeds\n"); diff --git a/tools/test/h5dump/expected/h5dump-help.txt b/tools/test/h5dump/expected/h5dump-help.txt index a78d8d820ec..694bc6ae975 100644 --- a/tools/test/h5dump/expected/h5dump-help.txt +++ b/tools/test/h5dump/expected/h5dump-help.txt @@ -105,7 +105,7 @@ usage: h5dump [OPTIONS] files --------------- Option Argument Conventions --------------- D - is the file driver to use in opening the file. Acceptable values are available from - https://portal.hdfgroup.org/documentation/hdf5-docs/registered_virtual_file_drivers_vfds.html. + https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html. Without the file driver flag, the file will be opened with each driver in turn and in the order specified above until one driver succeeds in opening the file. 
diff --git a/tools/test/h5dump/expected/pbits/tnofilename-with-packed-bits.ddl b/tools/test/h5dump/expected/pbits/tnofilename-with-packed-bits.ddl index a78d8d820ec..694bc6ae975 100644 --- a/tools/test/h5dump/expected/pbits/tnofilename-with-packed-bits.ddl +++ b/tools/test/h5dump/expected/pbits/tnofilename-with-packed-bits.ddl @@ -105,7 +105,7 @@ usage: h5dump [OPTIONS] files --------------- Option Argument Conventions --------------- D - is the file driver to use in opening the file. Acceptable values are available from - https://portal.hdfgroup.org/documentation/hdf5-docs/registered_virtual_file_drivers_vfds.html. + https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html. Without the file driver flag, the file will be opened with each driver in turn and in the order specified above until one driver succeeds in opening the file. diff --git a/tools/test/h5dump/expected/pbits/tpbitsIncomplete.ddl b/tools/test/h5dump/expected/pbits/tpbitsIncomplete.ddl index a78d8d820ec..694bc6ae975 100644 --- a/tools/test/h5dump/expected/pbits/tpbitsIncomplete.ddl +++ b/tools/test/h5dump/expected/pbits/tpbitsIncomplete.ddl @@ -105,7 +105,7 @@ usage: h5dump [OPTIONS] files --------------- Option Argument Conventions --------------- D - is the file driver to use in opening the file. Acceptable values are available from - https://portal.hdfgroup.org/documentation/hdf5-docs/registered_virtual_file_drivers_vfds.html. + https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html. Without the file driver flag, the file will be opened with each driver in turn and in the order specified above until one driver succeeds in opening the file. 
diff --git a/tools/test/h5dump/expected/pbits/tpbitsLengthExceeded.ddl b/tools/test/h5dump/expected/pbits/tpbitsLengthExceeded.ddl index a78d8d820ec..694bc6ae975 100644 --- a/tools/test/h5dump/expected/pbits/tpbitsLengthExceeded.ddl +++ b/tools/test/h5dump/expected/pbits/tpbitsLengthExceeded.ddl @@ -105,7 +105,7 @@ usage: h5dump [OPTIONS] files --------------- Option Argument Conventions --------------- D - is the file driver to use in opening the file. Acceptable values are available from - https://portal.hdfgroup.org/documentation/hdf5-docs/registered_virtual_file_drivers_vfds.html. + https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html. Without the file driver flag, the file will be opened with each driver in turn and in the order specified above until one driver succeeds in opening the file. diff --git a/tools/test/h5dump/expected/pbits/tpbitsLengthPositive.ddl b/tools/test/h5dump/expected/pbits/tpbitsLengthPositive.ddl index a78d8d820ec..694bc6ae975 100644 --- a/tools/test/h5dump/expected/pbits/tpbitsLengthPositive.ddl +++ b/tools/test/h5dump/expected/pbits/tpbitsLengthPositive.ddl @@ -105,7 +105,7 @@ usage: h5dump [OPTIONS] files --------------- Option Argument Conventions --------------- D - is the file driver to use in opening the file. Acceptable values are available from - https://portal.hdfgroup.org/documentation/hdf5-docs/registered_virtual_file_drivers_vfds.html. + https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html. Without the file driver flag, the file will be opened with each driver in turn and in the order specified above until one driver succeeds in opening the file. 
diff --git a/tools/test/h5dump/expected/pbits/tpbitsMaxExceeded.ddl b/tools/test/h5dump/expected/pbits/tpbitsMaxExceeded.ddl index a78d8d820ec..694bc6ae975 100644 --- a/tools/test/h5dump/expected/pbits/tpbitsMaxExceeded.ddl +++ b/tools/test/h5dump/expected/pbits/tpbitsMaxExceeded.ddl @@ -105,7 +105,7 @@ usage: h5dump [OPTIONS] files --------------- Option Argument Conventions --------------- D - is the file driver to use in opening the file. Acceptable values are available from - https://portal.hdfgroup.org/documentation/hdf5-docs/registered_virtual_file_drivers_vfds.html. + https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html. Without the file driver flag, the file will be opened with each driver in turn and in the order specified above until one driver succeeds in opening the file. diff --git a/tools/test/h5dump/expected/pbits/tpbitsOffsetExceeded.ddl b/tools/test/h5dump/expected/pbits/tpbitsOffsetExceeded.ddl index a78d8d820ec..694bc6ae975 100644 --- a/tools/test/h5dump/expected/pbits/tpbitsOffsetExceeded.ddl +++ b/tools/test/h5dump/expected/pbits/tpbitsOffsetExceeded.ddl @@ -105,7 +105,7 @@ usage: h5dump [OPTIONS] files --------------- Option Argument Conventions --------------- D - is the file driver to use in opening the file. Acceptable values are available from - https://portal.hdfgroup.org/documentation/hdf5-docs/registered_virtual_file_drivers_vfds.html. + https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html. Without the file driver flag, the file will be opened with each driver in turn and in the order specified above until one driver succeeds in opening the file. 
diff --git a/tools/test/h5dump/expected/pbits/tpbitsOffsetNegative.ddl b/tools/test/h5dump/expected/pbits/tpbitsOffsetNegative.ddl index a78d8d820ec..694bc6ae975 100644 --- a/tools/test/h5dump/expected/pbits/tpbitsOffsetNegative.ddl +++ b/tools/test/h5dump/expected/pbits/tpbitsOffsetNegative.ddl @@ -105,7 +105,7 @@ usage: h5dump [OPTIONS] files --------------- Option Argument Conventions --------------- D - is the file driver to use in opening the file. Acceptable values are available from - https://portal.hdfgroup.org/documentation/hdf5-docs/registered_virtual_file_drivers_vfds.html. + https://support.hdfgroup.org/documentation/HDF5/registered_virtual_file_drivers_vfds.html. Without the file driver flag, the file will be opened with each driver in turn and in the order specified above until one driver succeeds in opening the file.