diff --git a/.github/codeql-config.yml b/.github/codeql-config.yml new file mode 100644 index 00000000..d8e1ba94 --- /dev/null +++ b/.github/codeql-config.yml @@ -0,0 +1,28 @@ +# Query filters to include or exclude specific queries +query-filters: + - exclude: + # See: https://codeql.github.com/codeql-query-help/cpp/cpp-short-global-name/ + id: cpp/short-global-name + - exclude: + # See: https://codeql.github.com/codeql-query-help/cpp/cpp-commented-out-code/ + id: cpp/commented-out-code + - exclude: + # See: https://codeql.github.com/codeql-query-help/cpp/cpp-poorly-documented-function/ + id: cpp/poorly-documented-function + - exclude: + # See: https://codeql.github.com/codeql-query-help/cpp/cpp-trivial-switch/ + id: cpp/trivial-switch + - exclude: + # See: https://codeql.github.com/codeql-query-help/cpp/cpp-irregular-enum-init/ + id: cpp/irregular-enum-init + - exclude: + # See: https://codeql.github.com/codeql-query-help/cpp/cpp-guarded-free/ + id: cpp/guarded-free + +# Directories to scan for vulnerabilities +paths: + - src # Main source directory + +# Directories and files to ignore during the scan +paths-ignore: + - tests # Test directory diff --git a/.github/workflows/clang-format-check.yml b/.github/workflows/clang-format-check.yml index 9efa4c77..6eac6cd8 100644 --- a/.github/workflows/clang-format-check.yml +++ b/.github/workflows/clang-format-check.yml @@ -7,9 +7,9 @@ jobs: runs-on: ubuntu-latest if: "!contains(github.event.head_commit.message, 'skip-ci')" steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Run clang-format style check for C programs. - uses: DoozyX/clang-format-lint-action@v0.18.2 + uses: DoozyX/clang-format-lint-action@v0.18 with: source: '.' extensions: 'c,h,cpp,hpp' diff --git a/.github/workflows/clang-format-fix.yml b/.github/workflows/clang-format-fix.yml index a77e955f..75f64daf 100644 --- a/.github/workflows/clang-format-fix.yml +++ b/.github/workflows/clang-format-fix.yml @@ -8,9 +8,9 @@ jobs: runs-on: ubuntu-latest if: "!contains(github.event.head_commit.message, 'skip-ci')" steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Run clang-format style check for C programs. - uses: DoozyX/clang-format-lint-action@v0.18.2 + uses: DoozyX/clang-format-lint-action@v0.18 with: source: '.' 
extensions: 'c,h,cpp,hpp' diff --git a/.github/workflows/h5bench-hdf5-1.10.4.yml b/.github/workflows/h5bench-hdf5-1.10.4.yml index 73338ddc..921c1c2b 100644 --- a/.github/workflows/h5bench-hdf5-1.10.4.yml +++ b/.github/workflows/h5bench-hdf5-1.10.4.yml @@ -24,7 +24,7 @@ jobs: OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v5 with: submodules: true @@ -223,7 +223,7 @@ jobs: - name: Upload artifact if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: test path: build/storage/**/std* diff --git a/.github/workflows/h5bench-hdf5-1.10.7.yml b/.github/workflows/h5bench-hdf5-1.10.7.yml index d2a30029..e6527906 100644 --- a/.github/workflows/h5bench-hdf5-1.10.7.yml +++ b/.github/workflows/h5bench-hdf5-1.10.7.yml @@ -24,7 +24,7 @@ jobs: OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v5 with: submodules: true @@ -223,7 +223,7 @@ jobs: - name: Upload artifact if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: test path: build/h5bench_e3sm-prefix/src/h5bench_e3sm-stamp/* diff --git a/.github/workflows/h5bench-hdf5-1.10.8.yml b/.github/workflows/h5bench-hdf5-1.10.8.yml index c2a81e33..ac683f7b 100644 --- a/.github/workflows/h5bench-hdf5-1.10.8.yml +++ b/.github/workflows/h5bench-hdf5-1.10.8.yml @@ -24,7 +24,7 @@ jobs: OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v5 with: submodules: true @@ -223,7 +223,7 @@ jobs: - name: Upload artifact if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: test path: build/storage/**/std* diff --git a/.github/workflows/h5bench-hdf5-1.12.0.yml b/.github/workflows/h5bench-hdf5-1.12.0.yml index 7414a91d..401570e7 100644 --- a/.github/workflows/h5bench-hdf5-1.12.0.yml +++ b/.github/workflows/h5bench-hdf5-1.12.0.yml @@ -24,7 +24,7 @@ jobs: OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 steps: - - uses: actions/checkout@v1 + - uses: actions/checkout@v5 with: submodules: true @@ -258,7 +258,7 @@ jobs: - name: Upload artifact if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: test path: build/storage/**/std* diff --git a/.github/workflows/h5bench-hdf5-1.14.0.yml b/.github/workflows/h5bench-hdf5-1.14.0.yml index 43718915..2211a1fa 100644 --- a/.github/workflows/h5bench-hdf5-1.14.0.yml +++ b/.github/workflows/h5bench-hdf5-1.14.0.yml @@ -24,7 +24,7 @@ jobs: OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v5 with: submodules: true @@ -513,7 +513,7 @@ jobs: - name: Upload artifact if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: test path: build*/storage/**/std* diff --git a/.github/workflows/h5bench-hdf5-1.14.1.yml b/.github/workflows/h5bench-hdf5-1.14.1.yml index 617ac3de..6aa8b758 100644 --- a/.github/workflows/h5bench-hdf5-1.14.1.yml +++ b/.github/workflows/h5bench-hdf5-1.14.1.yml @@ -24,7 +24,7 @@ jobs: OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v5 with: submodules: true @@ -513,7 +513,7 @@ jobs: - name: Upload artifact if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: test path: build*/storage/**/std* diff --git a/.github/workflows/h5bench-hdf5-2.0.0.yml b/.github/workflows/h5bench-hdf5-2.0.0.yml new file mode 100644 index 00000000..343cf4a7 --- /dev/null +++ 
b/.github/workflows/h5bench-hdf5-2.0.0.yml @@ -0,0 +1,520 @@ +name: h5bench (HDF5 2.0.0) + +on: + pull_request: + + workflow_dispatch: + + push: + branches: + - master + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + h5bench: + runs-on: ubuntu-24.04 + container: + image: hpcio/hdf5-2.0.0 + timeout-minutes: 60 + env: + OMPI_ALLOW_RUN_AS_ROOT: 1 + OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 + + steps: + - uses: actions/checkout@v2 + with: + submodules: true + + - name: Configuration + run: | + git config --global user.email "ci@github.com" + git config --global user.name "Github CI" + + - name: Dependencies + run: | + # VOL-ASYNC + git clone --recursive https://github.com/hpc-io/vol-async.git --branch develop /opt/vol-async + + - name: Build Argobots + run: | + export ABT_DIR=/opt/argobots + + cd /opt/vol-async/argobots + + ./autogen.sh + ./configure --prefix=$ABT_DIR + + make -j 2 + make install + + - name: Build VOL-ASYNC + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + cd $ASYNC_DIR + mkdir build + cd build + + cmake .. -DCMAKE_POLICY_VERSION_MINIMUM=3.5 -DCMAKE_INSTALL_PREFIX=$ASYNC_DIR -DCMAKE_PREFIX_PATH=$HDF5_DIR -DENABLE_WRITE_MEMCPY=ON + make + make install + + - name: Test VOL-ASYNC + run: | + export HDF5_DIR=/opt/hdf5 + export HDF5_HOME=$HDF5_DIR + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_DIR/lib:$HDF5_DIR/lib:$ABT_DIR/lib:$LD_LIBRARY_PATH + export HDF5_PLUGIN_PATH="$ASYNC_DIR/include" + export HDF5_VOL_CONNECTOR="async under_vol=0;under_info={}" + + cd $ASYNC_DIR/build + + export LD_PRELOAD=$ASYNC_DIR/lib/libh5async.so:$ABT_DIR/lib/libabt.so:$HDF5_DIR/lib/libhdf5.so + + ctest + + - name: Build h5bench SYNC + run: | + export HDF5_HOME=/opt/hdf5 + + mkdir build-sync + cd build-sync + + cmake .. \ + -DH5BENCH_ALL=ON + make -j 2 + sudo make install + ldconfig + + - name: Build h5bench ASYNC + run: | + export HDF5_HOME=/opt/hdf5 + export ABT_HOME=/opt/argobots + export ASYNC_HOME=/opt/vol-async + + export LD_LIBRARY_PATH=$ASYNC_HOME/lib:$HDF5_HOME/lib:$ABT_HOME/lib:$LD_LIBRARY_PATH + + mkdir build-async + cd build-async + + cmake .. 
\ + -DWITH_ASYNC_VOL:BOOL=ON \ + -DCMAKE_C_FLAGS="-I$ASYNC_HOME/include -L$ASYNC_HOME/lib" \ + -DH5BENCH_ALL=ON + make -j 2 + sudo make install + ldconfig + + - name: Test h5bench SYNC write/read + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-read-contig-1d-small.json + + - name: Test h5bench SYNC write 1D contiguous (memory) strided (file) + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-strided.json + + - name: Test h5bench SYNC write 1D contiguous (memory) contiguous (file) + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-contig.json + + - name: Test h5bench SYNC write 1D contiguous (memory) interleaved (file) + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-interleaved.json + + - name: Test h5bench SYNC write 1D interleaved (memory) contiguous (file) + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-interleaved-contig.json + + - name: Test h5bench SYNC write 1D interleaved (memory) interleaved (file) + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-interleaved-interleaved.json + + - name: Test h5bench SYNC write 2D contiguous (memory) contiguous (file) + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-contig-contig.json + + - name: Test h5bench SYNC write 2D contiguous (memory) interleaved (file) + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-contig-interleaved.json + + - name: Test h5bench SYNC write 2D interleaved (memory) contiguous (file) + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-interleaved-contig.json + + - name: Test h5bench SYNC write 2D interleaved (memory) interleaved (file) + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-interleaved-interleaved.json + + - name: Test h5bench SYNC write 3D contiguous (memory) contiguous (file) + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-3d-contig-contig.json + + - name: Test h5bench SYNC read 1D contiguous (memory) contiguous (file) full + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-contig-read-full.json + + - name: Test h5bench SYNC read 1D contiguous (memory) contiguous (file) partial + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode 
../samples/sync-write-1d-contig-contig-read-partial.json + + - name: Test h5bench SYNC read 1D contiguous (memory) contiguous (file) strided + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-1d-contig-contig-read-strided.json + + - name: Test h5bench SYNC read 2D contiguous (memory) contiguous (file) full + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-contig-contig-read-full.json + + - name: Test h5bench SYNC read 3D contiguous (memory) contiguous (file) full + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure --validate-mode ../samples/sync-write-2d-contig-contig-read-full.json + + - name: Test h5bench SYNC write unlimited + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure ../samples/sync-write-unlimited.json + + - name: Test h5bench SYNC overwrite + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure ../samples/sync-overwrite.json + + - name: Test h5bench SYNC append + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure ../samples/sync-append.json + + - name: Test h5bench SYNC exerciser + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure ../samples/sync-exerciser.json + + - name: Test h5bench SYNC metadata + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure ../samples/sync-metadata.json + + - name: Test h5bench SYNC amrex + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure ../samples/sync-amrex.json + + - name: Test h5bench SYNC openpmd + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure ../samples/sync-openpmd.json + + - name: Test h5bench SYNC e3sm + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure ../samples/sync-e3sm.json + + - name: Test h5bench SYNC macsio + run: | + export LD_LIBRARY_PATH=/opt/hdf5/lib:$LD_LIBRARY_PATH + + cd build-sync + ./h5bench --debug --abort-on-failure ../samples/sync-macsio.json + + - name: Test h5bench ASYNC write/read + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py ../samples/async-write-read-contig-1d-small.json + + ./h5bench --debug --abort-on-failure ../samples/async-write-read-contig-1d-small.json + + - name: Test h5bench ASYNC write 1D contiguous (memory) strided (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py ../samples/async-write-1d-contig-strided.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-strided.json + + - name: Test h5bench ASYNC write 1D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export 
ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig.json + + - name: Test h5bench ASYNC write 1D contiguous (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py ../samples/async-write-1d-contig-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-interleaved.json + + - name: Test h5bench ASYNC write 1D interleaved (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py ../samples/async-write-1d-interleaved-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-contig.json + + - name: Test h5bench ASYNC write 1D interleaved (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py ../samples/async-write-1d-interleaved-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-interleaved-interleaved.json + + - name: Test h5bench ASYNC write 2D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig.json + + - name: Test h5bench ASYNC write 2D contiguous (memory) interleaved (file) + run: | + current="$PWD" + + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py ../samples/async-write-2d-contig-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-interleaved.json + + - name: Test h5bench ASYNC write 2D interleaved (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py ../samples/async-write-2d-interleaved-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-contig.json + + - name: Test h5bench ASYNC write 2D interleaved (memory) interleaved (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py ../samples/async-write-2d-interleaved-interleaved.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-interleaved-interleaved.json + + - name: Test h5bench ASYNC write 3D contiguous (memory) contiguous (file) + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py 
../samples/async-write-3d-contig-contig.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-3d-contig-contig.json + + - name: Test h5bench ASYNC read 1D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-full.json + + - name: Test h5bench ASYNC read 1D contiguous (memory) contiguous (file) partial + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-partial.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-partial.json + + - name: Test h5bench ASYNC read 1D contiguous (memory) contiguous (file) strided + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py ../samples/async-write-1d-contig-contig-read-strided.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-1d-contig-contig-read-strided.json + + - name: Test h5bench ASYNC read 2D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json + + - name: Test h5bench ASYNC read 3D contiguous (memory) contiguous (file) full + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py ../samples/async-write-2d-contig-contig-read-full.json + + ./h5bench --debug --abort-on-failure --validate-mode ../samples/async-write-2d-contig-contig-read-full.json + + - name: Test h5bench ASYNC amrex + run: | + export HDF5_DIR=/opt/hdf5 + export ABT_DIR=/opt/argobots + export ASYNC_DIR=/opt/vol-async + export HDF5_ASYNC_MAX_MEM_MB=1024 + + cd build-async + + python3 ../samples/update.py ../samples/async-amrex.json + + ./h5bench --debug --abort-on-failure ../samples/async-amrex.json + + - name: Upload artifact + if: always() + uses: actions/upload-artifact@v4 + with: + name: test + path: build* + retention-days: 5 \ No newline at end of file diff --git a/.github/workflows/h5bench-hdf5-develop-test.yml b/.github/workflows/h5bench-hdf5-develop-test.yml index c89e6cfc..8c0c7239 100644 --- a/.github/workflows/h5bench-hdf5-develop-test.yml +++ b/.github/workflows/h5bench-hdf5-develop-test.yml @@ -13,13 +13,14 @@ jobs: h5bench: runs-on: ubuntu-24.04 timeout-minutes: 60 + allow_failure: true env: OMPI_ALLOW_RUN_AS_ROOT: 1 OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 OMPI_MCA_rmaps_base_oversubscribe: "yes" steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v5 with: submodules: true @@ -32,7 +33,7 @@ jobs: git clone https://github.com/HDFGroup/hdf5.git # VOL-ASYNC - git clone --recursive https://github.com/hpc-io/vol-async.git --branch v1.9 
/opt/vol-async + git clone --recursive https://github.com/hpc-io/vol-async.git --branch develop /opt/vol-async python3 -m pip install pytest @@ -45,7 +46,10 @@ jobs: cd hdf5/build - cmake \ + mkdir build + cd build + + CC=mpicc cmake \ -DCMAKE_INSTALL_PREFIX=$HDF5_DIR \ -DHDF5_ENABLE_PARALLEL=ON \ -DHDF5_ENABLE_THREADSAFE=ON \ @@ -54,6 +58,7 @@ jobs: -DHDF5_BUILD_HL_LIB=OFF \ -DHDF5_BUILD_EXAMPLES=OFF \ -DHDF5_BUILD_TOOLS:BOOL=OFF \ + -DBUILD_STATIC_LIBS=OFF \ -DCMAKE_C_COMPILER=mpicc .. make -j 2 @@ -160,7 +165,7 @@ jobs: - name: Upload artifact if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: test path: | diff --git a/.github/workflows/h5bench-hdf5-develop.yml b/.github/workflows/h5bench-hdf5-develop.yml index 444bd674..070d0b8d 100644 --- a/.github/workflows/h5bench-hdf5-develop.yml +++ b/.github/workflows/h5bench-hdf5-develop.yml @@ -9,13 +9,14 @@ jobs: h5bench: runs-on: ubuntu-24.04 timeout-minutes: 60 + allow_failure: true env: OMPI_ALLOW_RUN_AS_ROOT: 1 OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1 OMPI_MCA_rmaps_base_oversubscribe: "yes" steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v5 with: submodules: true @@ -28,7 +29,7 @@ jobs: git clone https://github.com/HDFGroup/hdf5.git # VOL-ASYNC - git clone --recursive https://github.com/hpc-io/vol-async.git --branch v1.9 /opt/vol-async + git clone --recursive https://github.com/hpc-io/vol-async.git --branch develop /opt/vol-async - name: Build HDF5 develop run: | @@ -39,6 +40,9 @@ jobs: cd hdf5/build + mkdir build + cd build + cmake \ -DCMAKE_INSTALL_PREFIX=$HDF5_DIR \ -DHDF5_ENABLE_PARALLEL=ON \ @@ -48,6 +52,7 @@ jobs: -DHDF5_BUILD_HL_LIB=OFF \ -DHDF5_BUILD_EXAMPLES=OFF \ -DHDF5_BUILD_TOOLS:BOOL=OFF \ + -DBUILD_STATIC_LIBS=OFF \ -DCMAKE_C_COMPILER=mpicc .. 
make -j 2 @@ -645,7 +650,7 @@ jobs: - name: Upload artifact if: always() - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: test path: | diff --git a/CMakeLists.txt b/CMakeLists.txt index 39de9dcf..05ddfc32 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,5 +1,7 @@ cmake_minimum_required(VERSION 3.10 FATAL_ERROR) +set(CMAKE_POLICY_DEFAULT_CMP0077 NEW) + project( h5bench VERSION 1.3.0 @@ -246,19 +248,19 @@ endif() # https://github.com/openPMD/openPMD-api if(H5BENCH_OPENPMD) - set(openPMD_USE_MPI ON) - set(openPMD_USE_HDF5 ON) - set(openPMD_USE_ADIOS1 OFF) - set(openPMD_USE_ADIOS2 OFF) - set(openPMD_USE_JSON OFF) - set(openPMD_USE_PYTHON OFF) - set(openPMD_INSTALL ON) - set(openPMD_BUILD_TESTING OFF) - set(openPMD_BUILD_EXAMPLES OFF) - set(openPMD_BUILD_CLI_TOOLS OFF) - add_subdirectory(openpmd) + set(openPMD_USE_MPI ON CACHE BOOL "" FORCE) + set(openPMD_USE_HDF5 ON CACHE BOOL "" FORCE) + set(openPMD_USE_ADIOS1 OFF CACHE BOOL "" FORCE) + set(openPMD_USE_ADIOS2 OFF CACHE BOOL "" FORCE) + set(openPMD_USE_JSON OFF CACHE BOOL "" FORCE) + set(openPMD_USE_PYTHON OFF CACHE BOOL "" FORCE) + set(openPMD_INSTALL ON CACHE BOOL "" FORCE) + set(openPMD_BUILD_TESTING OFF CACHE BOOL "" FORCE) + set(openPMD_BUILD_EXAMPLES OFF CACHE BOOL "" FORCE) + set(openPMD_BUILD_CLI_TOOLS OFF CACHE BOOL "" FORCE) + add_executable(h5bench_openpmd_write openpmd/examples/8a_benchmark_write_parallel.cpp) target_link_libraries(h5bench_openpmd_write openPMD hdf5 MPI::MPI_C) @@ -315,6 +317,7 @@ if(H5BENCH_MACSIO) -DWITH_HDF5_PREFIX=${HDF5_HOME} -DWITH_JSON-CWX_PREFIX=${JSON_HOME} -DCMAKE_INSTALL_PREFIX:PATH=${CMAKE_CURRENT_BINARY_DIR} + -DCMAKE_POLICY_VERSION_MINIMUM=3.5 BUILD_COMMAND make COMMAND ${CMAKE_COMMAND} -E copy ${CMAKE_CURRENT_BINARY_DIR}/h5bench_macsio-prefix/src/h5bench_macsio-build/macsio/macsio ${CMAKE_CURRENT_BINARY_DIR}/h5bench_macsio LOG_CONFIGURE 1 diff --git a/commons/h5bench_util.c b/commons/h5bench_util.c index 5f898c86..ad7a3b1a 100644 --- a/commons/h5bench_util.c +++ b/commons/h5bench_util.c @@ -248,9 +248,18 @@ mem_monitor_check_run(mem_monitor *mon, unsigned long *metadata_time_total, unsi return 0; } +/** + * data_wait_time_per_step, metadata_wait_time_per_step can + * safely be set to NULL if info is not needed.
+ */ int -mem_monitor_final_run(mem_monitor *mon, unsigned long *metadata_time_total, unsigned long *data_time_total) +mem_monitor_final_run(mem_monitor *mon, unsigned long *metadata_time_total, unsigned long *data_time_total, + unsigned long *data_wait_time_per_step, unsigned long *metadata_wait_time_per_step) { + if (metadata_wait_time_per_step != NULL) + memset(metadata_wait_time_per_step, 0, mon->time_step_cnt * sizeof(unsigned long)); + if (data_wait_time_per_step) + memset(data_wait_time_per_step, 0, mon->time_step_cnt * sizeof(unsigned long)); *metadata_time_total = 0; *data_time_total = 0; size_t num_in_progress; @@ -292,7 +301,6 @@ mem_monitor_final_run(mem_monitor *mon, unsigned long *metadata_time_total, unsi ts_run->status = TS_READY; } } - t2 = get_time_usec(); meta_time += (t2 - t1); @@ -317,6 +325,10 @@ mem_monitor_final_run(mem_monitor *mon, unsigned long *metadata_time_total, unsi t6 = get_time_usec(); + if (metadata_wait_time_per_step != NULL) + metadata_wait_time_per_step[i] = ((t2 - t1) + (t4 - t3)); + if (data_wait_time_per_step != NULL) + data_wait_time_per_step[i] = (t3 - t2); meta_time += ((t2 - t1) + (t4 - t3)); data_time += (t3 - t2); ts_run->status = TS_DONE; diff --git a/commons/h5bench_util.h b/commons/h5bench_util.h index 5fc8adb4..b4e9d1ca 100644 --- a/commons/h5bench_util.h +++ b/commons/h5bench_util.h @@ -200,7 +200,8 @@ int ts_delayed_close(mem_monitor *mon, unsigned long *metadata_time_tot int mem_monitor_check_run(mem_monitor *mon, unsigned long *metadata_time_total, unsigned long *data_time_total); int mem_monitor_final_run(mem_monitor *mon, unsigned long *metadata_time_total, - unsigned long *data_time_total); + unsigned long *data_time_total, unsigned long *data_wait_time_per_step, + unsigned long *metadata_wait_time_per_step); // Uniform random number float uniform_random_number(); diff --git a/docker/ubuntu-24.04-hdf5-2.0.0/Dockerfile b/docker/ubuntu-24.04-hdf5-2.0.0/Dockerfile new file mode 100644 index 00000000..2b402d3a --- /dev/null +++ b/docker/ubuntu-24.04-hdf5-2.0.0/Dockerfile @@ -0,0 +1,51 @@ +FROM ubuntu:focal + +LABEL Description="Ubuntu 24.04 environment with HDF5 2.0.0" + +ENV DEBIAN_FRONTEND=noninteractive +ENV HDF5_LIBTOOL=/usr/bin/libtoolize + +RUN apt-get update \ + && apt-get install -y \ + git \ + curl \ + wget \ + sudo \ + gpg \ + ca-certificates \ + m4 \ + autoconf \ + automake \ + libtool \ + pkg-config \ + cmake \ + libtool \ + zlib1g-dev \ + python3 \ + python3-pip \ + python3-dev \ + python3-setuptools \ + gcc \ + g++ \ + libopenmpi-dev \ + software-properties-common + +RUN wget -O - https://apt.kitware.com/keys/kitware-archive-latest.asc 2>/dev/null | gpg --dearmor - | sudo tee /etc/apt/trusted.gpg.d/kitware.gpg >/dev/null \ + && sudo apt-add-repository 'deb https://apt.kitware.com/ubuntu/ focal main' \ + && apt-get update \ + && apt-get install cmake -y \ + && pip3 install psutil + +RUN wget https://github.com/HDFGroup/hdf5/archive/refs/tags/2.0.0.tar.gz \ + && tar zxvf 2.0.0.tar.gz \ + && mv hdf5-2.0.0 hdf5 \ + && cd hdf5 \ + && mkdir build \ + && cd build \ + && CC=mpicc cmake -DCMAKE_INSTALL_PREFIX=/opt/hdf5 -DHDF5_ENABLE_PARALLEL=ON -DHDF5_ALLOW_UNSUPPORTED=ON -DHDF5_ENABLE_THREADSAFE=ON .. 
\ + && make -j 8 \ + && make install + +RUN rm -rf /var/lib/apt/lists/* \ + && apt-get clean \ + && apt-get autoclean \ No newline at end of file diff --git a/h5bench_patterns/h5bench_append.c b/h5bench_patterns/h5bench_append.c index 8c11376d..5e988ea5 100644 --- a/h5bench_patterns/h5bench_append.c +++ b/h5bench_patterns/h5bench_append.c @@ -473,7 +473,7 @@ _run_benchmark_modify(hid_t file_id, hid_t fapl, hid_t gapl, hid_t filespace, be *inner_metadata_time += (meta_time1 + meta_time2 + meta_time3 + meta_time4 + meta_time5); } - mem_monitor_final_run(MEM_MONITOR, &metadata_time_imp, &read_time_imp); + mem_monitor_final_run(MEM_MONITOR, &metadata_time_imp, &read_time_imp, NULL, NULL); *raw_read_time_out += read_time_imp; *inner_metadata_time += metadata_time_imp; *total_data_size_out = nts * actual_read_cnt * (6 * sizeof(float) + 2 * sizeof(int)); diff --git a/h5bench_patterns/h5bench_overwrite.c b/h5bench_patterns/h5bench_overwrite.c index d67275c0..aefd532e 100644 --- a/h5bench_patterns/h5bench_overwrite.c +++ b/h5bench_patterns/h5bench_overwrite.c @@ -449,7 +449,7 @@ _run_benchmark_modify(hid_t file_id, hid_t fapl, hid_t gapl, hid_t filespace, be *inner_metadata_time += (meta_time1 + meta_time2 + meta_time3 + meta_time4 + meta_time5); } - mem_monitor_final_run(MEM_MONITOR, &metadata_time_imp, &read_time_imp); + mem_monitor_final_run(MEM_MONITOR, &metadata_time_imp, &read_time_imp, NULL, NULL); *raw_read_time_out += read_time_imp; *inner_metadata_time += metadata_time_imp; *total_data_size_out = nts * actual_read_cnt * (6 * sizeof(float) + 2 * sizeof(int)); diff --git a/h5bench_patterns/h5bench_read.c b/h5bench_patterns/h5bench_read.c index 59d45558..e4d14ba3 100644 --- a/h5bench_patterns/h5bench_read.c +++ b/h5bench_patterns/h5bench_read.c @@ -468,10 +468,14 @@ set_dataspace(bench_params params, unsigned long long try_read_elem_cnt, hid_t * int _run_benchmark_read(hid_t file_id, hid_t fapl, hid_t gapl, hid_t filespace, bench_params params, unsigned long *total_data_size_out, unsigned long *raw_read_time_out, - unsigned long *inner_metadata_time) + unsigned long *inner_metadata_time, unsigned long *data_time_per_step, + unsigned long *metadata_time_per_step, unsigned long *data_wait_time_per_step, + unsigned long *metadata_wait_time_per_step) { - *raw_read_time_out = 0; - *inner_metadata_time = 0; + *raw_read_time_out = 0; + *inner_metadata_time = 0; + memset(metadata_time_per_step, 0, params.cnt_time_step * sizeof(unsigned long)); + memset(data_time_per_step, 0, params.cnt_time_step * sizeof(unsigned long)); int nts = params.cnt_time_step; unsigned long long read_elem_cnt = params.try_num_particles; hid_t grp; @@ -563,11 +567,14 @@ _run_benchmark_read(hid_t file_id, hid_t fapl, hid_t gapl, hid_t filespace, benc } } - *raw_read_time_out += (read_time_exp + read_time_imp); - *inner_metadata_time += (meta_time1 + meta_time2 + meta_time3 + meta_time4 + meta_time5); + metadata_time_per_step[ts_index] = (meta_time1 + meta_time2 + meta_time3 + meta_time4 + meta_time5); + data_time_per_step[ts_index] = (read_time_exp + read_time_imp); + *raw_read_time_out += data_time_per_step[ts_index]; + *inner_metadata_time += metadata_time_per_step[ts_index]; } - mem_monitor_final_run(MEM_MONITOR, &metadata_time_imp, &read_time_imp); + mem_monitor_final_run(MEM_MONITOR, &metadata_time_imp, &read_time_imp, data_wait_time_per_step, + metadata_wait_time_per_step); *raw_read_time_out += read_time_imp; *inner_metadata_time += metadata_time_imp; *total_data_size_out = nts * actual_read_cnt * (6 * sizeof(float) + 
2 * sizeof(int)); @@ -609,13 +616,12 @@ main(int argc, char *argv[]) assert(MPI_THREAD_MULTIPLE == mpi_thread_lvl_provided); MPI_Comm_rank(MPI_COMM_WORLD, &MY_RANK); MPI_Comm_size(MPI_COMM_WORLD, &NUM_RANKS); - - int sleep_time = 0; - - bench_params params; - - char *cfg_file_path = argv[1]; - char *file_name = argv[2]; // data file to read + int sleep_time = 0; + bench_params params; + char * cfg_file_path = argv[1]; + char * file_name = argv[2]; // data file to read + unsigned long *data_time_per_step = NULL, *metadata_time_per_step = NULL; + unsigned long *data_wait_time_per_step = NULL, *metadata_wait_time_per_step = NULL; if (MY_RANK == 0) { printf("Configuration file: %s\n", argv[1]); @@ -709,6 +715,11 @@ main(int argc, char *argv[]) printf("Number of particles available per rank: %llu \n", NUM_PARTICLES); } + data_time_per_step = malloc(NUM_TIMESTEPS * sizeof(unsigned long)); + metadata_time_per_step = malloc(NUM_TIMESTEPS * sizeof(unsigned long)); + data_wait_time_per_step = malloc(NUM_TIMESTEPS * sizeof(unsigned long)); + metadata_wait_time_per_step = malloc(NUM_TIMESTEPS * sizeof(unsigned long)); + MPI_Barrier(MPI_COMM_WORLD); MPI_Allreduce(&NUM_PARTICLES, &TOTAL_PARTICLES, 1, MPI_LONG_LONG, MPI_SUM, MPI_COMM_WORLD); @@ -730,7 +741,8 @@ main(int argc, char *argv[]) unsigned long raw_read_time, metadata_time, local_data_size; int ret = _run_benchmark_read(file_id, fapl, gapl, filespace, params, &local_data_size, &raw_read_time, - &metadata_time); + &metadata_time, data_time_per_step, metadata_time_per_step, + data_wait_time_per_step, metadata_wait_time_per_step); if (ret < 0) { if (MY_RANK == 0) @@ -810,6 +822,29 @@ main(int argc, char *argv[]) value = format_human_readable(or_bs); fprintf(params.csv_fs, "observed rate, %.3f, %cB/s\n", value.value, value.unit); fprintf(params.csv_fs, "observed time, %.3f, %s\n", oct_s, "seconds"); + // Per timestep data time + for (int i = 0; i < NUM_TIMESTEPS; i++) { + float t = (float)data_time_per_step[i] / (1000.0 * 1000.0); + fprintf(params.csv_fs, "timestep %d data time, %.3f, %s\n", i, t, "seconds"); + } + // Per timestep inner metadata time + for (int i = 0; i < NUM_TIMESTEPS; i++) { + float t = (float)metadata_time_per_step[i] / (1000.0 * 1000.0); + fprintf(params.csv_fs, "timestep %d metadata time, %.3f, %s\n", i, t, "seconds"); + } + // Print wait time if async is enabled + if (has_vol_async) { + // Per timestep data wait time + for (int i = 0; i < NUM_TIMESTEPS; i++) { + float t = (float)data_wait_time_per_step[i] / (1000.0 * 1000.0); + fprintf(params.csv_fs, "timestep %d data wait time, %.3f, %s\n", i, t, "seconds"); + } + // Per timestep metadata wait time + for (int i = 0; i < NUM_TIMESTEPS; i++) { + float t = (float)metadata_wait_time_per_step[i] / (1000.0 * 1000.0); + fprintf(params.csv_fs, "timestep %d metadata wait time, %.3f, %s\n", i, t, "seconds"); + } + } fclose(params.csv_fs); } } @@ -824,6 +859,16 @@ main(int argc, char *argv[]) done: H5close(); + + if (data_time_per_step) + free(data_time_per_step); + if (metadata_time_per_step) + free(metadata_time_per_step); + if (data_wait_time_per_step) + free(data_wait_time_per_step); + if (metadata_wait_time_per_step) + free(metadata_wait_time_per_step); + MPI_Finalize(); return 0; } diff --git a/h5bench_patterns/h5bench_write.c b/h5bench_patterns/h5bench_write.c index a7151702..f019834d 100644 --- a/h5bench_patterns/h5bench_write.c +++ b/h5bench_patterns/h5bench_write.c @@ -718,7 +718,9 @@ _prepare_data(bench_params params, hid_t *filespace_out, hid_t *memspace_out, int 
_run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t filespace, hid_t memspace, void *data, unsigned long data_size, unsigned long *total_data_size_out, - unsigned long *data_time_total, unsigned long *metadata_time_total) + unsigned long *data_time_total, unsigned long *metadata_time_total, + unsigned long *data_time_per_step, unsigned long *metadata_time_per_step, + unsigned long *data_wait_time_per_step, unsigned long *metadata_wait_time_per_step) { unsigned long long data_preparation_time; @@ -726,6 +728,8 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files int timestep_cnt = params.cnt_time_step; *metadata_time_total = 0; *data_time_total = 0; + memset(metadata_time_per_step, 0, timestep_cnt * sizeof(unsigned long)); + memset(data_time_per_step, 0, timestep_cnt * sizeof(unsigned long)); char grp_name[128]; int grp_cnt = 0, dset_cnt = 0; hid_t plist_id; //, filespace, memspace; @@ -760,7 +764,7 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files meta_time1 = 0, meta_time2 = 0, meta_time3 = 0, meta_time4 = 0, meta_time5 = 0; time_step *ts = &(MEM_MONITOR->time_steps[ts_index]); MEM_MONITOR->mem_used += ts->mem_size; - // print_mem_bound(MEM_MONITOR); + sprintf(grp_name, "Timestep_%d", ts_index); assert(ts); @@ -842,14 +846,16 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files } } - *metadata_time_total += (meta_time1 + meta_time2 + meta_time3 + meta_time4); - *data_time_total += (data_time_exp + data_time_imp); + metadata_time_per_step[ts_index] = meta_time1 + meta_time2 + meta_time3 + meta_time4; + data_time_per_step[ts_index] = data_time_exp + data_time_imp; + *metadata_time_total += metadata_time_per_step[ts_index]; + *data_time_total += data_time_per_step[ts_index]; } // end for timestep_cnt // all done, check if any timesteps undone - mem_monitor_final_run(MEM_MONITOR, &metadata_time_imp, &data_time_imp); - + mem_monitor_final_run(MEM_MONITOR, &metadata_time_imp, &data_time_imp, data_wait_time_per_step, + metadata_wait_time_per_step); *metadata_time_total += metadata_time_imp; *data_time_total += data_time_imp; @@ -979,10 +985,12 @@ main(int argc, char *argv[]) assert(MPI_THREAD_MULTIPLE == mpi_thread_lvl_provided); MPI_Comm_rank(MPI_COMM_WORLD, &MY_RANK); MPI_Comm_size(MPI_COMM_WORLD, &NUM_RANKS); - MPI_Comm comm = MPI_COMM_WORLD; - MPI_Info info = MPI_INFO_NULL; - char * num_str = "1024 Ks"; - unsigned long long num = 0; + MPI_Comm comm = MPI_COMM_WORLD; + MPI_Info info = MPI_INFO_NULL; + char * num_str = "1024 Ks"; + unsigned long long num = 0; + unsigned long * data_time_per_step = NULL, *metadata_time_per_step = NULL; + unsigned long * data_wait_time_per_step = NULL, *metadata_wait_time_per_step = NULL; char buffer[200]; @@ -1051,6 +1059,11 @@ main(int argc, char *argv[]) unsigned long data_size = 0; unsigned long data_preparation_time = 0; + data_time_per_step = malloc(NUM_TIMESTEPS * sizeof(unsigned long)); + metadata_time_per_step = malloc(NUM_TIMESTEPS * sizeof(unsigned long)); + data_wait_time_per_step = malloc(NUM_TIMESTEPS * sizeof(unsigned long)); + metadata_wait_time_per_step = malloc(NUM_TIMESTEPS * sizeof(unsigned long)); + MPI_Barrier(MPI_COMM_WORLD); MPI_Allreduce(&NUM_PARTICLES, &TOTAL_PARTICLES, 1, MPI_LONG_LONG, MPI_SUM, comm); @@ -1066,9 +1079,7 @@ main(int argc, char *argv[]) ALIGN_THRESHOLD = params.align_threshold; ALIGN_LEN = params.align_len; - if (params.file_per_proc) { - } - else { + if (!params.file_per_proc) { #ifdef HAVE_SUBFILING 
if (params.subfiling == 1) H5Pset_fapl_subfiling(fapl, NULL); @@ -1098,14 +1109,16 @@ main(int argc, char *argv[]) unsigned long tfopen_end = get_time_usec(); if (MY_RANK == 0) - printf("Opened HDF5 file... \n"); + printf("Opened HDF5 file...\n"); MPI_Barrier(MPI_COMM_WORLD); unsigned long t2 = get_time_usec(); // t2 - t1: metadata: creating/opening unsigned long raw_write_time, inner_metadata_time, local_data_size; - int stat = _run_benchmark_write(params, file_id, fapl, filespace, memspace, data, data_size, - &local_data_size, &raw_write_time, &inner_metadata_time); + int stat = + _run_benchmark_write(params, file_id, fapl, filespace, memspace, data, data_size, &local_data_size, + &raw_write_time, &inner_metadata_time, data_time_per_step, + metadata_time_per_step, data_wait_time_per_step, metadata_wait_time_per_step); if (stat < 0) { if (MY_RANK == 0) @@ -1197,10 +1210,42 @@ main(int argc, char *argv[]) value = format_human_readable(or_bs); fprintf(params.csv_fs, "observed rate, %.3f, %cB/s\n", value.value, value.unit); fprintf(params.csv_fs, "observed time, %.3f, %s\n", oct_s, "seconds"); + // Per timestep data time + for (int i = 0; i < NUM_TIMESTEPS; i++) { + float t = (float)data_time_per_step[i] / (1000.0 * 1000.0); + fprintf(params.csv_fs, "timestep %d data time, %.3f, %s\n", i, t, "seconds"); + } + // Per timestep inner metadata time + for (int i = 0; i < NUM_TIMESTEPS; i++) { + float t = (float)metadata_time_per_step[i] / (1000.0 * 1000.0); + fprintf(params.csv_fs, "timestep %d metadata time, %.3f, %s\n", i, t, "seconds"); + } + // Print wait time if async is enabled + if (has_vol_async) { + // Per timestep data wait time + for (int i = 0; i < NUM_TIMESTEPS; i++) { + float t = (float)data_wait_time_per_step[i] / (1000.0 * 1000.0); + fprintf(params.csv_fs, "timestep %d data wait time, %.3f, %s\n", i, t, "seconds"); + } + // Per timestep metadata wait time + for (int i = 0; i < NUM_TIMESTEPS; i++) { + float t = (float)metadata_wait_time_per_step[i] / (1000.0 * 1000.0); + fprintf(params.csv_fs, "timestep %d metadata wait time, %.3f, %s\n", i, t, "seconds"); + } + } fclose(params.csv_fs); } } + if (data_time_per_step) + free(data_time_per_step); + if (metadata_time_per_step) + free(metadata_time_per_step); + if (data_wait_time_per_step) + free(data_wait_time_per_step); + if (metadata_wait_time_per_step) + free(metadata_wait_time_per_step); + MPI_Finalize(); return 0; } diff --git a/h5bench_patterns/h5bench_write_normal_dist.c b/h5bench_patterns/h5bench_write_normal_dist.c index 46deeb59..37556d0b 100644 --- a/h5bench_patterns/h5bench_write_normal_dist.c +++ b/h5bench_patterns/h5bench_write_normal_dist.c @@ -886,7 +886,7 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files // all done, check if any timesteps undone - mem_monitor_final_run(MEM_MONITOR, &metadata_time_imp, &data_time_imp); + mem_monitor_final_run(MEM_MONITOR, &metadata_time_imp, &data_time_imp, NULL, NULL); *metadata_time_total += metadata_time_imp; *data_time_total += data_time_imp; diff --git a/h5bench_patterns/h5bench_write_unlimited.c b/h5bench_patterns/h5bench_write_unlimited.c index e05ec9e2..ac478d61 100644 --- a/h5bench_patterns/h5bench_write_unlimited.c +++ b/h5bench_patterns/h5bench_write_unlimited.c @@ -818,7 +818,7 @@ _run_benchmark_write(bench_params params, hid_t file_id, hid_t fapl, hid_t files // all done, check if any timesteps undone - mem_monitor_final_run(MEM_MONITOR, &metadata_time_imp, &data_time_imp); + mem_monitor_final_run(MEM_MONITOR, &metadata_time_imp, 
&data_time_imp, NULL, NULL); *metadata_time_total += metadata_time_imp; *data_time_total += data_time_imp; diff --git a/openpmd b/openpmd index 32cb87d1..0d5e7242 160000 --- a/openpmd +++ b/openpmd @@ -1 +1 @@ -Subproject commit 32cb87d1b0012493287204d7a8c78af9a1141710 +Subproject commit 0d5e7242dd28a68edde2f03a652addec78516cf0
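
Note on the per-timestep reporting introduced above: mem_monitor_final_run() now takes two additional, nullable output arrays (data_wait_time_per_step and metadata_wait_time_per_step) that it fills only when they are non-NULL; h5bench_write.c and h5bench_read.c allocate one unsigned long slot per timestep, pass the arrays in, and emit the results as extra CSV rows (microseconds converted to seconds), while h5bench_append.c, h5bench_overwrite.c, h5bench_write_normal_dist.c, and h5bench_write_unlimited.c simply pass NULL, NULL to keep their old behavior. The stand-alone C sketch below only illustrates that calling convention and the CSV formatting; fill_wait_times() and the sample values are hypothetical stand-ins for the real mem_monitor_final_run() and measured timings, not part of h5bench.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical stand-in for mem_monitor_final_run(): like the real function,
 * it touches the per-timestep wait-time arrays only when they are non-NULL. */
static void
fill_wait_times(int nts, unsigned long *data_wait, unsigned long *meta_wait)
{
    if (data_wait != NULL)
        memset(data_wait, 0, nts * sizeof(unsigned long));
    if (meta_wait != NULL)
        memset(meta_wait, 0, nts * sizeof(unsigned long));

    if (data_wait != NULL)
        for (int i = 0; i < nts; i++)
            data_wait[i] = 1500UL * (i + 1); /* dummy values, in microseconds */
}

int
main(void)
{
    int            nts       = 4; /* stands in for NUM_TIMESTEPS */
    unsigned long *data_wait = malloc(nts * sizeof(unsigned long));
    unsigned long *meta_wait = malloc(nts * sizeof(unsigned long));

    fill_wait_times(nts, data_wait, meta_wait);

    /* Same conversion as the new CSV rows: microseconds -> seconds. */
    for (int i = 0; i < nts; i++) {
        float t = (float)data_wait[i] / (1000.0 * 1000.0);
        printf("timestep %d data wait time, %.3f, seconds\n", i, t);
    }

    free(data_wait);
    free(meta_wait);
    return 0;
}

Passing NULL for the wait-time arrays is the compatibility path: callers that do not report per-timestep waits incur no extra allocation, and the totals returned through metadata_time_total and data_time_total are unchanged.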