From 02d09936806b20306b617ff28fc52ccc3cb7df99 Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 09:37:50 -0600
Subject: [PATCH 01/47] Create codeql.yml

---
 .github/workflows/codeql.yml | 253 +++++++++++++++++++++++++++++++++++
 1 file changed, 253 insertions(+)
 create mode 100644 .github/workflows/codeql.yml

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
new file mode 100644
index 0000000..9e215a2
--- /dev/null
+++ b/.github/workflows/codeql.yml
@@ -0,0 +1,253 @@
+# For most projects, this workflow file will not need changing; you simply need
+# to commit it to your repository.
+#
+# You may wish to alter this file to override the set of languages analyzed,
+# or to provide custom queries or build logic.
+#
+# ******** NOTE ********
+# We have attempted to detect the languages in your repository. Please check
+# the `language` matrix defined below to confirm you have the correct set of
+# supported CodeQL languages.
+#
+name: "CodeQL Advanced"
+
+on:
+  push:
+    branches: [ "develop" ]
+  pull_request:
+    branches: [ "develop" ]
+  schedule:
+    - cron: '43 6 * * 3'
+
+jobs:
+  analyze:
+    name: Analyze c-cpp
+    # Runner size impacts CodeQL analysis time. To learn more, please see:
+    #   - https://gh.io/recommended-hardware-resources-for-running-codeql
+    #   - https://gh.io/supported-runners-and-hardware-resources
+    #   - https://gh.io/using-larger-runners (GitHub.com only)
+    # Consider using larger runners or machines with greater resources for possible analysis time improvements.
+    runs-on: ubuntu-latest
+    timeout-minutes: 20
+    permissions:
+      # required for all workflows
+      security-events: write
+
+      # required to fetch internal or private CodeQL packs
+      packages: read
+
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+        - language: c-cpp
+          build-mode: manual
+        - language: python
+          build-mode: none
+        # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
+        # Use `c-cpp` to analyze code written in C, C++ or both
+        # Use 'java-kotlin' to analyze code written in Java, Kotlin or both
+        # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
+        # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
+        # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
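+        # Editorial aside, a hedged illustration only (not part of this
+        # repository's configuration): a project that also shipped Go code
+        # could extend the include list above with an entry such as
+        #   - language: go
+        #     build-mode: autobuild
+        # and CodeQL would then analyze that language in its own matrix job.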
+        # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
+        # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
+    steps:
+        - name: Checkout repository
+          uses: actions/checkout@v4
+
+        - name: Dependencies
+        run: |
+          sudo apt-get update
+          sudo apt-get install libtool
+          git config pull.rebase false
+          # hdf5
+          git clone https://github.com/HDFGroup/hdf5.git
+          # async vol
+          git clone https://github.com/hpc-io/vol-async.git
+          # Argobots
+          git clone https://github.com/pmodels/argobots.git
+          # h5bench
+          git clone https://github.com/zhenghh04/h5bench.git
+          # mpi
+          sudo apt-get install libopenmpi-dev
+          # zlib
+          sudo apt-get install zlib1g-dev
+          # python3
+          sudo apt-get install python3
+
+      - name: Installation
+        run: |
+          export mydir="$PWD"
+          export EXAHDF5_ROOT=$mydir
+          export SDK_DIR=$EXAHDF5_ROOT/soft/
+          mkdir -p $SDK_DIR
+          export HDF5_ROOT=$SDK_DIR/hdf5/
+          mkdir -p $HDF5_ROOT
+          export HDF5_HOME=$HDF5_ROOT
+          export HDF5_DIR=$HDF5_ROOT
+          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol/
+          mkdir -p $HDF5_VOL_DIR
+          mkdir -p $HDF5_VOL_DIR/lib/
+          mkdir -p $HDF5_VOL_DIR/include/
+          export ABT_DIR=$SDK_DIR/argobots/
+          mkdir -p $ABT_DIR
+          # Compile HDF5
+          mkdir -p hdf5/build
+          cd hdf5/build
+          cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$HDF5_DIR -DHDF5_ENABLE_PARALLEL:BOOL=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DALLOW_UNSUPPORTED:BOOL=ON ..
+          make -j2 install
+          cd -
+          cd argobots
+          ./autogen.sh
+          ./configure --prefix=$ABT_DIR
+          make && make install -j2
+          cd -
+          # Compile Asynchronous VOL connector
+          mkdir -p vol-async/build
+          cd vol-async/build
+          cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
+          make all install -j2
+          cd -
+          # Compile Cache VOL connector
+          mkdir -p build
+          cd build
+          cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
+          make all install -j2
+          cd -
+          # Compile h5bench
+          mkdir -p h5bench/build
+          cd h5bench/build
+          cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib -g"
+          make all install VERBOSE=1 -j2
+          cd -
+
+      # Initializes the CodeQL tools for scanning.
+      - name: Initialize CodeQL
+        uses: github/codeql-action/init@v3
+        with:
+          languages: c-cpp
+          build-mode: manual
+          config-file: ./.github/codeql-config.yml
+          # If you wish to specify custom queries, you can do so here or in a config file.
+          # By default, queries listed here will override any specified in a config file.
+          # Prefix the list here with "+" to use these queries and those in the config file.
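+          # Editorial aside (hedged): without the "+" prefix, a line such as
+          #   queries: security-extended
+          # would replace the query suites coming from ./.github/codeql-config.yml,
+          # whereas the "+security-extended,security-and-quality" form used below
+          # runs the listed suites in addition to the config file's queries.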
+
+          # For more details on CodeQL's query packs, refer to:
+          #https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
+          queries: +security-extended,security-and-quality
+
+        - name: Test Vol-Cache-Node-Local
+        run: |
+          ulimit -d unlimited
+          ulimit -s unlimited
+          mkdir -p SSD
+          export EXAHDF5_ROOT=$PWD
+          export SDK_DIR=$EXAHDF5_ROOT/soft/
+          export HDF5_ROOT=$SDK_DIR/hdf5
+          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
+          export ABT_DIR=$SDK_DIR/argobots/
+          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
+          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
+          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
+          export HDF5_CACHE_DEBUG=100
+          printf "HDF5_CACHE_STORAGE_TYPE: SSD\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" > cache_1.cfg
+          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
+          cat cache_1.cfg
+          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
+          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
+          for opt in 'yes' 'no'
+          do
+            echo "Testing"
+            HDF5_CACHE_WR=$opt mpirun -np 2 write_cache.exe
+            HDF5_CACHE_RD=$opt mpirun -np 2 read_cache.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
+          done
+
+      - name: Test Vol-Cache-MEMORY
+        run: |
+          ulimit -d unlimited
+          ulimit -s unlimited
+          mkdir -p SSD
+          export EXAHDF5_ROOT=$PWD
+          export SDK_DIR=$EXAHDF5_ROOT/soft/
+          export HDF5_ROOT=$SDK_DIR/hdf5
+          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
+          export ABT_DIR=$SDK_DIR/argobots/
+          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
+          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
+          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
+          export HDF5_CACHE_DEBUG=100
+          printf "HDF5_CACHE_STORAGE_TYPE: MEMORY\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 2147483648" > cache_1.cfg
+          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
+          cat cache_1.cfg
+          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
+          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
+          for opt in 'yes' 'no'
+          do
+            echo "Testing"
+            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 1 test_dataset.exe
+            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 1 write_cache.exe
+            HDF5_CACHE_RD=$opt mpirun -np 2 read_cache.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
+          done
+
+      - name: Test Vol-Cache-Global
+        run: |
+          mkdir -p SSD
+          export EXAHDF5_ROOT=$PWD
+          export SDK_DIR=$EXAHDF5_ROOT/soft/
+          export HDF5_ROOT=$SDK_DIR/hdf5
+          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
+          export ABT_DIR=$SDK_DIR/argobots/
+          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
+          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
+          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
+          export HDF5_CACHE_DEBUG=100
+          printf "HDF5_CACHE_STORAGE_TYPE: GLOBAL\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: GLOBAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" > cache_1.cfg
+          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
+          cat cache_1.cfg
+          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
+          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
+          for opt in 'yes' 'no'
+          do
+            echo "Testing"
+            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 2 write_cache.exe
+            HDF5_CACHE_RD=$opt mpirun -np 2 read_cache.exe
+            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
+          done
+
+      - name: Test Vol-Cache-Fusion
+        run: |
+          mkdir -p SSD
+          export EXAHDF5_ROOT=$PWD
+          export SDK_DIR=$EXAHDF5_ROOT/soft/
+          export HDF5_ROOT=$SDK_DIR/hdf5
+          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
+          export ABT_DIR=$SDK_DIR/argobots/
+          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
+          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
+          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
+          export HDF5_CACHE_DEBUG=100
+          printf "HDF5_CACHE_STORAGE_TYPE: MEMORY\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 2147483648\nHDF5_CACHE_FUSION_THRESHOLD: 16777216" > cache_1.cfg
+          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
+          cat cache_1.cfg
+          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
+          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
+          HDF5_CACHE_WR=yes mpirun -np 2 write_cache.exe --dim 16 16
+
+        - name: Perform CodeQL Analysis
+          uses: github/codeql-action/analyze@v3
+          with:
+            category: "/language:c-cpp"

From ac1dc7c4184c241c749e2c1954777dc594ed58f1 Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 09:47:27 -0600
Subject: [PATCH 02/47] fixed syntax

---
 .github/codeql-config.yml | 15 +++++++++++++++
 1 file changed, 15 insertions(+)
 create mode 100644 .github/codeql-config.yml

diff --git a/.github/codeql-config.yml b/.github/codeql-config.yml
new file mode 100644
index 0000000..dff3c1b
--- /dev/null
+++ b/.github/codeql-config.yml
@@ -0,0 +1,15 @@
+query-filters:
+  - exclude:
+      id: 3rdparty
+  - exclude:
+      id: cpp/toctou-race-condition
+  - exclude:
+      id: cpp/short-global-name
+  - exclude:
+      # See: https://codeql.github.com/codeql-query-help/cpp/cpp-commented-out-code/
+      id: cpp/commented-out-code
+paths:
+  - 'src'
+paths-ignore:
+  - 'test'
+

From cc6a34036777a0e67fd4ed9f57e9457c867231cd Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 09:52:16 -0600
Subject: [PATCH 03/47] fixed syntax2

---
 .github/workflows/codeql.yml | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 9e215a2..49d061c 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -53,10 +53,10 @@ jobs:
       # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
       # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
     steps:
-        - name: Checkout repository
-          uses: actions/checkout@v4
+      - name: Checkout repository
+        uses: actions/checkout@v4
 
-        - name: Dependencies
+      - name: Dependencies
         run: |
           sudo apt-get update
           sudo apt-get install libtool
           git config pull.rebase false
@@ -137,7 +137,7 @@ jobs:
           #https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
           queries: +security-extended,security-and-quality
 
-        - name: Test Vol-Cache-Node-Local
+      - name: Test Vol-Cache-Node-Local
         run: |
           ulimit -d unlimited
           ulimit -s unlimited
           mkdir -p SSD
@@ -247,7 +247,7 @@ jobs:
           export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
           HDF5_CACHE_WR=yes mpirun -np 2 write_cache.exe --dim 16 16
 
-        - name: Perform CodeQL Analysis
-          uses: github/codeql-action/analyze@v3
-          with:
-            category: "/language:c-cpp"
+      - name: Perform CodeQL Analysis
+        uses: github/codeql-action/analyze@v3
+        with:
+          category: "/language:c-cpp"

From c06c64b4a61ed351bd73afed56993c200a642d8b Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 10:10:02 -0600
Subject: [PATCH 04/47] fixed syntax3

---
 .github/workflows/codeql.yml | 81 +-----------------------------------
 1 file changed, 1 insertion(+), 80 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 49d061c..bded5e9 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -54,7 +54,7 @@ jobs:
       # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v4.1.1
 
       - name: Dependencies
         run: |
@@ -168,85 +168,6 @@ jobs:
             HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
           done
 
-      - name: Test Vol-Cache-MEMORY
-        run: |
-          ulimit -d unlimited
-          ulimit -s unlimited
-          mkdir -p SSD
-          export EXAHDF5_ROOT=$PWD
-          export SDK_DIR=$EXAHDF5_ROOT/soft/
-          export HDF5_ROOT=$SDK_DIR/hdf5
-          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
-          export ABT_DIR=$SDK_DIR/argobots/
-          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
-          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
-          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
-          export HDF5_CACHE_DEBUG=100
-          printf "HDF5_CACHE_STORAGE_TYPE: MEMORY\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 2147483648" > cache_1.cfg
-          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
-          cat cache_1.cfg
-          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
-          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
-          for opt in 'yes' 'no'
-          do
-            echo "Testing"
-            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 1 test_dataset.exe
-            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 1 write_cache.exe
-            HDF5_CACHE_RD=$opt mpirun -np 2 read_cache.exe
-            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
-            HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
-            HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
-            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
-          done
-
-      - name: Test Vol-Cache-Global
-        run: |
-          mkdir -p SSD
-          export EXAHDF5_ROOT=$PWD
-          export SDK_DIR=$EXAHDF5_ROOT/soft/
-          export HDF5_ROOT=$SDK_DIR/hdf5
-          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
-          export ABT_DIR=$SDK_DIR/argobots/
-          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
-          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
-          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
-          export HDF5_CACHE_DEBUG=100
-          printf "HDF5_CACHE_STORAGE_TYPE: GLOBAL\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: GLOBAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" > cache_1.cfg
-          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
-          cat cache_1.cfg
-          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
-          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
-          for opt in 'yes' 'no'
-          do
-            echo "Testing"
-            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 2 write_cache.exe
-            HDF5_CACHE_RD=$opt mpirun -np 2 read_cache.exe
-            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset.exe
-            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
-            HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
-            HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
-            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
-          done
-
-      - name: Test Vol-Cache-Fusion
-        run: |
-          mkdir -p SSD
-          export EXAHDF5_ROOT=$PWD
-          export SDK_DIR=$EXAHDF5_ROOT/soft/
-          export HDF5_ROOT=$SDK_DIR/hdf5
-          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
-          export ABT_DIR=$SDK_DIR/argobots/
-          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
-          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
-          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
-          export HDF5_CACHE_DEBUG=100
-          printf "HDF5_CACHE_STORAGE_TYPE: MEMORY\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 2147483648\nHDF5_CACHE_FUSION_THRESHOLD: 16777216" > cache_1.cfg
-          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
-          cat cache_1.cfg
-          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
-          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
-          HDF5_CACHE_WR=yes mpirun -np 2 write_cache.exe --dim 16 16
-
       - name: Perform CodeQL Analysis
         uses: github/codeql-action/analyze@v3
         with:
           category: "/language:c-cpp"

From e26a4edce28f0586db8b2defbe7a1fbae398d309 Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 10:35:49 -0600
Subject: [PATCH 05/47] fixed syntax4

---
 .github/workflows/codeql.yml | 31 +++++++++++++++++--------------
 1 file changed, 17 insertions(+), 14 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index bded5e9..5bc8cf9 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -76,7 +76,7 @@ jobs:
           # python3
           sudo apt-get install python3
 
-      - name: Installation
+      - name: Installation (dependences)
         run: |
           export mydir="$PWD"
           export EXAHDF5_ROOT=$mydir
@@ -108,19 +108,6 @@ jobs:
           cd vol-async/build
           cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
           make all install -j2
-          cd -
-          # Compile Cache VOL connector
-          mkdir -p build
-          cd build
-          cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
-          make all install -j2
-          cd -
-          # Compile h5bench
-          mkdir -p h5bench/build
-          cd h5bench/build
-          cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib -g"
-          make all install VERBOSE=1 -j2
-          cd -
 
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
@@ -137,6 +124,22 @@ jobs:
           #https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
           queries: +security-extended,security-and-quality
 
+      - name: Installation (Cache VOL connector)
+        run: |
+          cd -
+          # Compile Cache VOL connector
+          mkdir -p build
+          cd build
+          cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
+          make all install -j2
+          cd -
+          # Compile h5bench
+          mkdir -p h5bench/build
+          cd h5bench/build
+          cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib -g"
+          make all install VERBOSE=1 -j2
+          cd -
+
       - name: Test Vol-Cache-Node-Local
         run: |
           ulimit -d unlimited

From c071d5d09fc67b7e2743a3b6e2b8d813f0bf1dd6 Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 10:49:05 -0600
Subject: [PATCH 06/47] fixed syntax5

---
 .github/workflows/codeql.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 5bc8cf9..f72388d 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -138,10 +138,10 @@ jobs:
           cd h5bench/build
           cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib -g"
           make all install VERBOSE=1 -j2
-          cd -
 
       - name: Test Vol-Cache-Node-Local
         run: |
+          cd -
           ulimit -d unlimited
           ulimit -s unlimited
           mkdir -p SSD

From 20a90c677d10cf450c4bb420c1b1d4fad88a2a74 Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 11:36:20 -0600
Subject: [PATCH 07/47] fixed syntax6

---
 .github/workflows/codeql.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index f72388d..85407c5 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -126,7 +126,7 @@ jobs:
       - name: Installation (Cache VOL connector)
         run: |
-          cd -
+          cd ~
           # Compile Cache VOL connector
           mkdir -p build
           cd build
@@ -141,7 +141,7 @@ jobs:
       - name: Test Vol-Cache-Node-Local
         run: |
-          cd -
+          cd ~
           ulimit -d unlimited
           ulimit -s unlimited
           mkdir -p SSD

From 7284948d2797249ae1278bbce326a47c4fab7b09 Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 11:50:22 -0600
Subject: [PATCH 08/47] fixed syntax7

---
 .github/workflows/codeql.yml | 2 --
 1 file changed, 2 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 85407c5..4ec412f 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -126,7 +126,6 @@ jobs:
       - name: Installation (Cache VOL connector)
         run: |
-          cd ~
           # Compile Cache VOL connector
           mkdir -p build
           cd build
@@ -141,7 +140,6 @@ jobs:
       - name: Test Vol-Cache-Node-Local
         run: |
-          cd ~
           ulimit -d unlimited
           ulimit -s unlimited
           mkdir -p SSD

From 39565efb8f820161b4d8dcb0768fe784a6272f8a Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 14:12:50 -0600
Subject: [PATCH 09/47] fixed syntax8

---
 .github/workflows/codeql.yml | 13 ++++++-------
 .github/workflows/linux.yml  |  2 +-
 2 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 4ec412f..1389424 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -42,8 +42,6 @@ jobs:
         include:
         - language: c-cpp
           build-mode: manual
-        - language: python
-          build-mode: none
         # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
         # Use `c-cpp` to analyze code written in C, C++ or both
         # Use 'java-kotlin' to analyze code written in Java, Kotlin or both
@@ -78,13 +78,18 @@ jobs:
         run: |
           export mydir="$PWD"
           export EXAHDF5_ROOT=$mydir
+          echo "EXAHDF5_ROOT=$mydir" >> $GITHUB_ENV
           export SDK_DIR=$EXAHDF5_ROOT/soft/
+          echo "SDK_DIR=$EXAHDF5_ROOT/soft/" >> $GITHUB_ENV
           mkdir -p $SDK_DIR
           export HDF5_ROOT=$SDK_DIR/hdf5/
+          echo "HDF5_ROOT=$SDK_DIR/hdf5/" >> $GITHUB_ENV
           mkdir -p $HDF5_ROOT
           export HDF5_HOME=$HDF5_ROOT
           export HDF5_DIR=$HDF5_ROOT
+          echo "HDF5_DIR=$HDF5_ROOT" >> $GITHUB_ENV
           export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol/
+          echo "HDF5_VOL_DIR=$SDK_DIR/hdf5/vol/" >> $GITHUB_ENV
           mkdir -p $HDF5_VOL_DIR
           mkdir -p $HDF5_VOL_DIR/lib/
           mkdir -p $HDF5_VOL_DIR/include/
@@ -98,7 +103,7 @@ jobs:
           # Compile HDF5
           mkdir -p hdf5/build
           cd hdf5/build
-          cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$HDF5_DIR -DHDF5_ENABLE_PARALLEL:BOOL=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DALLOW_UNSUPPORTED:BOOL=ON ..
+          cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$HDF5_DIR -DHDF5_ENABLE_PARALLEL:BOOL=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ALLOW_UNSUPPORTED:BOOL=ON ..
           make -j2 install
           cd -
           cd argobots
@@ -146,10 +151,6 @@ jobs:
           ulimit -d unlimited
           ulimit -s unlimited
           mkdir -p SSD
-          export EXAHDF5_ROOT=$PWD
-          export SDK_DIR=$EXAHDF5_ROOT/soft/
-          export HDF5_ROOT=$SDK_DIR/hdf5
-          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
           export ABT_DIR=$SDK_DIR/argobots/
           export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
           export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml
index 0c666a6..da55d18 100644
--- a/.github/workflows/linux.yml
+++ b/.github/workflows/linux.yml
@@ -60,7 +60,7 @@ jobs:
         # Compile HDF5
         mkdir -p hdf5/build
         cd hdf5/build
-        cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$HDF5_DIR -DHDF5_ENABLE_PARALLEL:BOOL=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DALLOW_UNSUPPORTED:BOOL=ON ..
+        cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$HDF5_DIR -DHDF5_ENABLE_PARALLEL:BOOL=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ALLOW_UNSUPPORTED:BOOL=ON ..
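+        # Editorial note (hedged): this rename appears to track HDF5's move to
+        # HDF5_-prefixed CMake options in newer releases; on older HDF5
+        # sources the unprefixed ALLOW_UNSUPPORTED spelling may still be the
+        # one that takes effect.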
         make -j2 install
         cd -
         cd argobots

From 9050634d0ded8358765cca7cb21b2df996ca186b Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 14:25:45 -0600
Subject: [PATCH 10/47] fixed syntax9

---
 .github/workflows/codeql.yml | 12 ++++++------
 .github/workflows/linux.yml  |  2 +-
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 1389424..e05feac 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -94,6 +94,7 @@ jobs:
           mkdir -p $HDF5_VOL_DIR/lib/
           mkdir -p $HDF5_VOL_DIR/include/
           export ABT_DIR=$SDK_DIR/argobots/
+          echo "ABT_DIR=$SDK_DIR/argobots/" >> $GITHUB_ENV
           mkdir -p $ABT_DIR
           # Compile HDF5
           mkdir -p hdf5/build
           cd hdf5/build
@@ -137,11 +138,10 @@ jobs:
           cd -
           # Compile h5bench
-          mkdir -p h5bench/build
-          cd h5bench/build
-          cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib -g"
-          make all install VERBOSE=1 -j2
+          #mkdir -p h5bench/build
+          #cd h5bench/build
+          #cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib -g"
+          #make all install VERBOSE=1 -j2
 
       - name: Test Vol-Cache-Node-Local
         run: |
           ulimit -d unlimited
           ulimit -s unlimited
           mkdir -p SSD
-          export ABT_DIR=$SDK_DIR/argobots/
           export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
           export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
           export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
@@ -165,7 +165,7 @@ jobs:
             HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
-            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
+            # HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
           done
diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml
index da55d18..2fe6de4 100644
--- a/.github/workflows/linux.yml
+++ b/.github/workflows/linux.yml
@@ -16,7 +16,7 @@ on:
 jobs:
   vol-cache:
     runs-on: ubuntu-latest
-    timeout-minutes: 60
+    timeout-minutes: 20
     steps:
       - uses: actions/checkout@v4.1.1

From d6208c9f9d1972590a04254302552673ac0829d5 Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 14:44:14 -0600
Subject: [PATCH 11/47] fixed syntax10

---
 .github/workflows/codeql.yml | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index e05feac..2913e79 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -138,9 +138,11 @@ jobs:
           cd -
           # Compile h5bench
           #mkdir -p h5bench/build
-          #cd h5bench/build
-          #cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib -g"
-          #make all install VERBOSE=1 -j2
+          ls /$HDF5_VOL_DIR/include
+          ls /$HDF5_VOL_DIR/lib
+          cd h5bench/build
+          cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib -g"
+          make all install VERBOSE=1 -j2

From 7d86bdf8d07adb38a359a11dfde7dfac33d77787 Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 14:53:45 -0600
Subject: [PATCH 12/47] fixed syntax11

---
 .github/workflows/codeql.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 2913e79..88f8c8f 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -137,11 +137,11 @@ jobs:
           make all install -j2
           cd -
           # Compile h5bench
-          #mkdir -p h5bench/build
+          mkdir -p h5bench/build
           ls /$HDF5_VOL_DIR/include
           ls /$HDF5_VOL_DIR/lib
           cd h5bench/build
-          cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib -g"
+          cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I$HDF5_VOL_DIR/include -L$HDF5_VOL_DIR/lib -g"
           make all install VERBOSE=1 -j2

From adcfab10cd7f1bdad4c5227f923710800e1aafbf Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 15:14:15 -0600
Subject: [PATCH 13/47] fixed syntax12

---
 .github/workflows/codeql.yml | 19 ++++++++++---------
 1 file changed, 10 insertions(+), 9 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 88f8c8f..284b819 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -79,17 +79,17 @@ jobs:
           export mydir="$PWD"
           export EXAHDF5_ROOT=$mydir
           echo "EXAHDF5_ROOT=$mydir" >> $GITHUB_ENV
-          export SDK_DIR=$EXAHDF5_ROOT/soft/
-          echo "SDK_DIR=$EXAHDF5_ROOT/soft/" >> $GITHUB_ENV
+          export SDK_DIR=$EXAHDF5_ROOT/soft
+          echo "SDK_DIR=$EXAHDF5_ROOT/soft" >> $GITHUB_ENV
           mkdir -p $SDK_DIR
-          export HDF5_ROOT=$SDK_DIR/hdf5/
-          echo "HDF5_ROOT=$SDK_DIR/hdf5/" >> $GITHUB_ENV
+          export HDF5_ROOT=$SDK_DIR/hdf5
+          echo "HDF5_ROOT=$SDK_DIR/hdf5" >> $GITHUB_ENV
           mkdir -p $HDF5_ROOT
           export HDF5_HOME=$HDF5_ROOT
           export HDF5_DIR=$HDF5_ROOT
           echo "HDF5_DIR=$HDF5_ROOT" >> $GITHUB_ENV
-          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol/
-          echo "HDF5_VOL_DIR=$SDK_DIR/hdf5/vol/" >> $GITHUB_ENV
+          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
+          echo "HDF5_VOL_DIR=$SDK_DIR/hdf5/vol" >> $GITHUB_ENV
           mkdir -p $HDF5_VOL_DIR
           mkdir -p $HDF5_VOL_DIR/lib/
           mkdir -p $HDF5_VOL_DIR/include/
@@ -130,16 +130,17 @@ jobs:
       - name: Installation (Cache VOL connector)
         run: |
+          echo "$HDF5_VOL_DIR"
+          ls $HDF5_VOL_DIR/include
+          ls $HDF5_VOL_DIR/lib
           # Compile Cache VOL connector
           mkdir -p build
           cd build
           cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
           make all install -j2
-          cd -
+          cd ..
           # Compile h5bench
           mkdir -p h5bench/build
-          ls /$HDF5_VOL_DIR/include
-          ls /$HDF5_VOL_DIR/lib
           cd h5bench/build
           cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I$HDF5_VOL_DIR/include -L$HDF5_VOL_DIR/lib -g"
           make all install VERBOSE=1 -j2

From 9ddc93d2959e45e8ad90b43bb8728f4b0a185a0a Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 15:26:48 -0600
Subject: [PATCH 14/47] fixed syntax12

---
 .github/workflows/codeql.yml | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 284b819..ba18319 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -131,6 +131,7 @@ jobs:
         run: |
           echo "$HDF5_VOL_DIR"
+          ls -aolF
           ls $HDF5_VOL_DIR/include
           ls $HDF5_VOL_DIR/lib
           # Compile Cache VOL connector
@@ -143,7 +143,7 @@ jobs:
           cd h5bench/build
-          cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I$HDF5_VOL_DIR/include -L$HDF5_VOL_DIR/lib -g"
+          cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I$HDF5_DIR/include -L$HDF5__DIR/lib -g"
           make all install VERBOSE=1 -j2

From 9a3c79fa4a8e8befd33b4880e93a4163d93dba98 Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 15:54:25 -0600
Subject: [PATCH 15/47] fixed syntax14

---
 .github/workflows/codeql.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index ba18319..d37caf4 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -143,7 +143,7 @@ jobs:
           cd h5bench/build
-          cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I$HDF5_DIR/include -L$HDF5__DIR/lib -g"
+          cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I$HDF5_VOL_DIR/include -I$HDF5_DIR/include -L$HDF5_VOL_DIR/lib -L$HDF5_DIR/lib -g"
           make all install VERBOSE=1 -j2

From 893fbd46dce304e4a2f541430f52036b92734ece Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 16:06:16 -0600
Subject: [PATCH 16/47] fixed syntax14

---
 .github/workflows/codeql.yml | 6 +-----
 1 file changed, 1 insertion(+), 5 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index d37caf4..4db85a0 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -130,10 +130,6 @@ jobs:
       - name: Installation (Cache VOL connector)
         run: |
-          echo "$HDF5_VOL_DIR"
-          ls -aolF
-          ls $HDF5_VOL_DIR/include
-          ls $HDF5_VOL_DIR/lib
           # Compile Cache VOL connector
           mkdir -p build
           cd build
@@ -169,7 +165,7 @@ jobs:
             HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
-            # HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
+            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
           done

From 9b2702200ed8c666b5f53edb2bcdac89ac9203f5 Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 16:30:36 -0600
Subject: [PATCH 17/47] fixed syntax15

---
 .github/codeql-config.yml | 25 +++++++++++++++++++------
 1 file changed, 19 insertions(+), 6 deletions(-)

diff --git a/.github/codeql-config.yml b/.github/codeql-config.yml
index dff3c1b..658e23e 100644
--- a/.github/codeql-config.yml
+++ b/.github/codeql-config.yml
@@ -1,15 +1,28 @@
+# Query filters to include or exclude specific queries
 query-filters:
   - exclude:
-      id: 3rdparty
-  - exclude:
-      id: cpp/toctou-race-condition
-  - exclude:
+      # See: https://codeql.github.com/codeql-query-help/cpp/cpp-short-global-name/
       id: cpp/short-global-name
   - exclude:
       # See: https://codeql.github.com/codeql-query-help/cpp/cpp-commented-out-code/
       id: cpp/commented-out-code
+  - exclude:
+      # See: https://codeql.github.com/codeql-query-help/cpp/cpp-poorly-documented-function/
+      id: cpp/poorly-documented-function
+  - exclude:
+      # See: https://codeql.github.com/codeql-query-help/cpp/cpp-trivial-switch/
+      id: cpp/trivial-switch
+  - exclude:
+      # See: https://codeql.github.com/codeql-query-help/cpp/cpp-irregular-enum-init/
+      id: cpp/irregular-enum-init
+
+# Directories to scan for vulnerabilities
 paths:
-  - 'src'
+  - src # Main source directory
+
+# Directories and files to ignore during the scan
 paths-ignore:
-  - 'test'
+  - tests # Test directory
+  - benchmarks
+  - h5bench

From b45e20f763b62b8e487a1f652b098b4066b0550c Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 16:42:37 -0600
Subject: [PATCH 18/47] fixed syntax16

---
 .github/workflows/codeql.yml | 49 ++++++++++++++++++++++++++++++
 1 file changed, 49 insertions(+)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 4db85a0..1ea477f 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -168,6 +168,55 @@ jobs:
             HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
           done
 
+       - name: Test Vol-Cache-MEMORY
+        run: |
+          ulimit -d unlimited
+          ulimit -s unlimited
+          mkdir -p SSD
+          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
+          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
+          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
+          export HDF5_CACHE_DEBUG=100
+          printf "HDF5_CACHE_STORAGE_TYPE: MEMORY\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 2147483648" > cache_1.cfg
+          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
+          cat cache_1.cfg
+          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
+          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
+          for opt in 'yes' 'no'
+          do
+            echo "Testing"
+            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 1 test_dataset.exe
+            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 1 write_cache.exe
+            HDF5_CACHE_RD=$opt mpirun -np 2 read_cache.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
+          done
+
+      - name: Test Vol-Cache-Global
+        run: |
+          mkdir -p SSD
+          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
+          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
+          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
+          export HDF5_CACHE_DEBUG=100
+          printf "HDF5_CACHE_STORAGE_TYPE: GLOBAL\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: GLOBAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" > cache_1.cfg
+          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
+          cat cache_1.cfg
+          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
+          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
+          for opt in 'yes' 'no'
+          do
+            echo "Testing"
+            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 2 write_cache.exe
+            HDF5_CACHE_RD=$opt mpirun -np 2 read_cache.exe
+            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
+
+
       - name: Perform CodeQL Analysis
         uses: github/codeql-action/analyze@v3
         with:
           category: "/language:c-cpp"

From 253533d6f7e1288b25432cd04abb82cee13e64b7 Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 16:45:37 -0600
Subject: [PATCH 19/47] fixed syntax16

---
 .github/codeql-config.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/codeql-config.yml b/.github/codeql-config.yml
index 658e23e..dc0d863 100644
--- a/.github/codeql-config.yml
+++ b/.github/codeql-config.yml
@@ -22,7 +22,7 @@ paths:
 
 # Directories and files to ignore during the scan
 paths-ignore:
-  - tests # Test directory
-  - benchmarks
-  - h5bench
+  - ./tests # Test directory
+  - ./benchmarks
+  - ./h5bench

From 26cb15adf23f7a33ee77df9d642820a1dfd696bb Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 16:49:22 -0600
Subject: [PATCH 20/47] fixed syntax17

---
 .github/workflows/codeql.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 1ea477f..ab9ea33 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -168,7 +168,7 @@ jobs:
             HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
           done
 
-       - name: Test Vol-Cache-MEMORY
+      - name: Test Vol-Cache-MEMORY
         run: |
           ulimit -d unlimited
           ulimit -s unlimited
           mkdir -p SSD

From f7dd31b3bfd246ba91b2b1b2731404ea3aa8b7c8 Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 17:03:11 -0600
Subject: [PATCH 21/47] fixed syntax18

---
 .github/workflows/codeql.yml | 23 -----------------------
 1 file changed, 23 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index ab9ea33..43aefba 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -193,29 +193,6 @@ jobs:
             HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
           done
-
-      - name: Test Vol-Cache-Global
-        run: |
-          mkdir -p SSD
-          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
-          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
-          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
-          export HDF5_CACHE_DEBUG=100
-          printf "HDF5_CACHE_STORAGE_TYPE: GLOBAL\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: GLOBAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" > cache_1.cfg
-          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
-          cat cache_1.cfg
-          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
-          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
-          for opt in 'yes' 'no'
-          do
-            echo "Testing"
-            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 2 write_cache.exe
-            HDF5_CACHE_RD=$opt mpirun -np 2 read_cache.exe
-            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset.exe
-            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
-            HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
-            HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
 
       - name: Perform CodeQL Analysis
         uses: github/codeql-action/analyze@v3
         with:
           category: "/language:c-cpp"

From eb7c21e9480c297764278e143345d5f93649ac88 Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Mon, 9 Dec 2024 17:23:46 -0600
Subject: [PATCH 22/47] remove h5bench

---
 .github/workflows/codeql.yml | 13 ++-----------
 1 file changed, 2 insertions(+), 11 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 43aefba..2303ca0 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -65,8 +65,6 @@ jobs:
           git clone https://github.com/hpc-io/vol-async.git
           # Argobots
           git clone https://github.com/pmodels/argobots.git
-          # h5bench
-          git clone https://github.com/zhenghh04/h5bench.git
           # mpi
           sudo apt-get install libopenmpi-dev
           # zlib
@@ -136,18 +134,13 @@ jobs:
           cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
           make all install -j2
           cd ..
-          # Compile h5bench
-          mkdir -p h5bench/build
-          cd h5bench/build
-          cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I$HDF5_VOL_DIR/include -I$HDF5_DIR/include -L$HDF5_VOL_DIR/lib -L$HDF5_DIR/lib -g"
-          make all install VERBOSE=1 -j2
 
       - name: Test Vol-Cache-Node-Local
         run: |
           ulimit -d unlimited
           ulimit -s unlimited
           mkdir -p SSD
-          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
+          export PATH=$HDF5_VOL_DIR/bin:$PATH
           export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
           export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
           export HDF5_CACHE_DEBUG=100
@@ -158,7 +151,6 @@ jobs:
             HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
-            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
           done
 
       - name: Test Vol-Cache-MEMORY
         run: |
           ulimit -d unlimited
           ulimit -s unlimited
           mkdir -p SSD
-          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
+          export PATH=$HDF5_VOL_DIR/bin:$PATH
           export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
           export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
           export HDF5_CACHE_DEBUG=100
@@ -183,7 +175,6 @@ jobs:
             HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
-            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
           done

From 0869f1d0dd8ba63d94d34f7374f7faa4bb933718 Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Tue, 10 Dec 2024 14:52:14 -0600
Subject: [PATCH 23/47] Create codeql.yml

---
 .github/workflows/codeql.yml | 115 +++++++++++++++++++++++++++++------
 1 file changed, 97 insertions(+), 18 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 2303ca0..2bf98c2 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -42,6 +42,8 @@ jobs:
         include:
         - language: c-cpp
           build-mode: manual
+        - language: python
+          build-mode: none
         # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
         # Use `c-cpp` to analyze code written in C, C++ or both
         # Use 'java-kotlin' to analyze code written in Java, Kotlin or both
         # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
@@ -53,10 +55,10 @@ jobs:
       # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
     steps:
-      - name: Checkout repository
-        uses: actions/checkout@v4.1.1
+        - name: Checkout repository
+          uses: actions/checkout@v4
 
-      - name: Dependencies
+        - name: Dependencies
         run: |
           sudo apt-get update
           sudo apt-get install libtool
           git config pull.rebase false
           # hdf5
           git clone https://github.com/HDFGroup/hdf5.git
           # async vol
           git clone https://github.com/hpc-io/vol-async.git
           # Argobots
           git clone https://github.com/pmodels/argobots.git
+          # h5bench
+          git clone https://github.com/zhenghh04/h5bench.git
           # mpi
           sudo apt-get install libopenmpi-dev
           # zlib
           sudo apt-get install zlib1g-dev
           # python3
           sudo apt-get install python3
 
-      - name: Installation (dependences)
+      - name: Installation
         run: |
           export mydir="$PWD"
           export EXAHDF5_ROOT=$mydir
-          echo "EXAHDF5_ROOT=$mydir" >> $GITHUB_ENV
-          export SDK_DIR=$EXAHDF5_ROOT/soft
-          echo "SDK_DIR=$EXAHDF5_ROOT/soft" >> $GITHUB_ENV
+          export SDK_DIR=$EXAHDF5_ROOT/soft/
           mkdir -p $SDK_DIR
-          export HDF5_ROOT=$SDK_DIR/hdf5
-          echo "HDF5_ROOT=$SDK_DIR/hdf5" >> $GITHUB_ENV
+          export HDF5_ROOT=$SDK_DIR/hdf5/
           mkdir -p $HDF5_ROOT
           export HDF5_HOME=$HDF5_ROOT
           export HDF5_DIR=$HDF5_ROOT
-          echo "HDF5_DIR=$HDF5_ROOT" >> $GITHUB_ENV
-          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
-          echo "HDF5_VOL_DIR=$SDK_DIR/hdf5/vol" >> $GITHUB_ENV
+          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol/
           mkdir -p $HDF5_VOL_DIR
           mkdir -p $HDF5_VOL_DIR/lib/
           mkdir -p $HDF5_VOL_DIR/include/
           export ABT_DIR=$SDK_DIR/argobots/
-          echo "ABT_DIR=$SDK_DIR/argobots/" >> $GITHUB_ENV
           mkdir -p $ABT_DIR
           # Compile HDF5
           mkdir -p hdf5/build
           cd hdf5/build
-          cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$HDF5_DIR -DHDF5_ENABLE_PARALLEL:BOOL=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ALLOW_UNSUPPORTED:BOOL=ON ..
+          cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$HDF5_DIR -DHDF5_ENABLE_PARALLEL:BOOL=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DALLOW_UNSUPPORTED:BOOL=ON ..
           make -j2 install
           cd -
           cd argobots
           ./autogen.sh
           ./configure --prefix=$ABT_DIR
           make && make install -j2
@@ -106,6 +112,19 @@ jobs:
           cd vol-async/build
           cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
           make all install -j2
+          cd -
+          # Compile Cache VOL connector
+          mkdir -p build
+          cd build
+          cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
+          make all install -j2
+          cd -
+          # Compile h5bench
+          mkdir -p h5bench/build
+          cd h5bench/build
+          cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib -g"
+          make all install VERBOSE=1 -j2
+          cd -
 
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
@@ -128,7 +147,11 @@ jobs:
           #https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
           queries: +security-extended,security-and-quality
 
+<<<<<<< HEAD
       - name: Installation (Cache VOL connector)
         run: |
           # Compile Cache VOL connector
           mkdir -p build
           cd build
           cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
           make all install -j2
           cd ..
           # Compile h5bench
           mkdir -p h5bench/build
           cd h5bench/build
           cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I$HDF5_VOL_DIR/include -I$HDF5_DIR/include -L$HDF5_VOL_DIR/lib -L$HDF5_DIR/lib -g"
           make all install VERBOSE=1 -j2
 
       - name: Test Vol-Cache-Node-Local
+=======
+      - name: Test Vol-Cache-Node-Local
+>>>>>>> 957a852 (Create codeql.yml)
         run: |
           ulimit -d unlimited
           ulimit -s unlimited
           mkdir -p SSD
+<<<<<<< HEAD
           export PATH=$HDF5_VOL_DIR/bin:$PATH
+=======
+          export EXAHDF5_ROOT=$PWD
+          export SDK_DIR=$EXAHDF5_ROOT/soft/
+          export HDF5_ROOT=$SDK_DIR/hdf5
+          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
+          export ABT_DIR=$SDK_DIR/argobots/
+          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
+>>>>>>> 957a852 (Create codeql.yml)
           export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
           export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
           export HDF5_CACHE_DEBUG=100
@@ -158,6 +187,7 @@ jobs:
             HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
           done
 
       - name: Test Vol-Cache-MEMORY
         run: |
           ulimit -d unlimited
           ulimit -s unlimited
           mkdir -p SSD
+<<<<<<< HEAD
           export PATH=$HDF5_VOL_DIR/bin:$PATH
+=======
+          export EXAHDF5_ROOT=$PWD
+          export SDK_DIR=$EXAHDF5_ROOT/soft/
+          export HDF5_ROOT=$SDK_DIR/hdf5
+          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
+          export ABT_DIR=$SDK_DIR/argobots/
+          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
+>>>>>>> 957a852 (Create codeql.yml)
           export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
           export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
           export HDF5_CACHE_DEBUG=100
@@ -183,8 +215,56 @@ jobs:
             HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
           done
+
+      - name: Test Vol-Cache-Global
+        run: |
+          mkdir -p SSD
+          export EXAHDF5_ROOT=$PWD
+          export SDK_DIR=$EXAHDF5_ROOT/soft/
+          export HDF5_ROOT=$SDK_DIR/hdf5
+          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
+          export ABT_DIR=$SDK_DIR/argobots/
+          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
+          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
+          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
+          export HDF5_CACHE_DEBUG=100
+          printf "HDF5_CACHE_STORAGE_TYPE: GLOBAL\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: GLOBAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 17179869184" > cache_1.cfg
+          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
+          cat cache_1.cfg
+          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
+          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
+          for opt in 'yes' 'no'
+          do
+            echo "Testing"
+            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 2 write_cache.exe
+            HDF5_CACHE_RD=$opt mpirun -np 2 read_cache.exe
+            HDF5_CACHE_LOG_LEVEL=debug HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
+          done
+
+      - name: Test Vol-Cache-Fusion
+        run: |
+          mkdir -p SSD
+          export EXAHDF5_ROOT=$PWD
+          export SDK_DIR=$EXAHDF5_ROOT/soft/
+          export HDF5_ROOT=$SDK_DIR/hdf5
+          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
+          export ABT_DIR=$SDK_DIR/argobots/
+          export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
+          export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
+          export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
+          export HDF5_CACHE_DEBUG=100
+          printf "HDF5_CACHE_STORAGE_TYPE: MEMORY\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 2147483648\nHDF5_CACHE_FUSION_THRESHOLD: 16777216" > cache_1.cfg
+          export LD_PRELOAD=$ABT_DIR/lib/libabt.so
+          cat cache_1.cfg
+          HDF5_VOL_CONNECTOR='' prepare_dataset.exe
+          export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}"
+          HDF5_CACHE_WR=yes mpirun -np 2 write_cache.exe --dim 16 16
 
-      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3
-        with:
-          category: "/language:c-cpp"
+        - name: Perform CodeQL Analysis
+          uses: github/codeql-action/analyze@v3
+          with:
+            category: "/language:c-cpp"

From 7c7ee741a60cfd3e77a0edb9a4b4748eaf7d6935 Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Thu, 12 Dec 2024 09:34:41 -0600
Subject: [PATCH 24/47] codeQL testing (#1)

* fixed HDF5_VOL_CONNECTOR settings for the tests

* update the README

* lower test timeout

* fixed critical codeQL items
---
 .github/codeql-config.yml    |  11 ---
 .github/workflows/codeql.yml | 145 ++++++++++++-----------------------
 src/H5LS.c                   |   9 ++-
 src/H5VLcache_ext.c          | 139 ++++++++++++++++++++++-----------
 4 files changed, 152 insertions(+), 152 deletions(-)

diff --git a/.github/codeql-config.yml b/.github/codeql-config.yml
index dc0d863..5936bd0 100644
--- a/.github/codeql-config.yml
+++ b/.github/codeql-config.yml
@@ -15,14 +15,3 @@ query-filters:
   - exclude:
       # See: https://codeql.github.com/codeql-query-help/cpp/cpp-irregular-enum-init/
       id: cpp/irregular-enum-init
-
-# Directories to scan for vulnerabilities
-paths:
-  - src # Main source directory
-
-# Directories and files to ignore during the scan
-paths-ignore:
-  - ./tests # Test directory
-  - ./benchmarks
-  - ./h5bench
-
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 2bf98c2..583771f 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -42,8 +42,6 @@ jobs:
         include:
         - language: c-cpp
           build-mode: manual
-        - language: python
-          build-mode: none
         # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
         # Use `c-cpp` to analyze code written in C, C++ or both
         # Use 'java-kotlin' to analyze code written in Java, Kotlin or both
@@ -51,10 +51,10 @@ jobs:
       # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
       # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
     steps:
-        - name: Checkout repository
-          uses: actions/checkout@v4
+      - name: Checkout repository
+        uses: actions/checkout@v4.1.1
 
-        - name: Dependencies
+      - name: Dependencies
         run: |
           sudo apt-get update
           sudo apt-get install libtool
           git config pull.rebase false
           # hdf5
           git clone https://github.com/HDFGroup/hdf5.git
           # async vol
           git clone https://github.com/hpc-io/vol-async.git
           # Argobots
           git clone https://github.com/pmodels/argobots.git
           # h5bench
           git clone https://github.com/zhenghh04/h5bench.git
           # mpi
           sudo apt-get install libopenmpi-dev
           # zlib
           sudo apt-get install zlib1g-dev
           # python3
           sudo apt-get install python3
 
-      - name: Installation
+      - name: Installation (dependences)
         run: |
           export mydir="$PWD"
           export EXAHDF5_ROOT=$mydir
+          echo "EXAHDF5_ROOT=$mydir" >> $GITHUB_ENV
-          export SDK_DIR=$EXAHDF5_ROOT/soft/
+          export SDK_DIR=$EXAHDF5_ROOT/soft
+          echo "SDK_DIR=$EXAHDF5_ROOT/soft" >> $GITHUB_ENV
           mkdir -p $SDK_DIR
-          export HDF5_ROOT=$SDK_DIR/hdf5/
+          export HDF5_ROOT=$SDK_DIR/hdf5
+          echo "HDF5_ROOT=$SDK_DIR/hdf5" >> $GITHUB_ENV
           mkdir -p $HDF5_ROOT
           export HDF5_HOME=$HDF5_ROOT
           export HDF5_DIR=$HDF5_ROOT
+          echo "HDF5_DIR=$HDF5_ROOT" >> $GITHUB_ENV
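+          # Editorial note (hedged): appending KEY=value lines to $GITHUB_ENV
+          # is the GitHub Actions mechanism for making variables visible to
+          # later steps; a plain `export` only lives for the current step's
+          # shell, which is why the split Installation/Test steps rely on
+          # these echo lines.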
+          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
+          echo "HDF5_VOL_DIR=$SDK_DIR/hdf5/vol" >> $GITHUB_ENV
           mkdir -p $HDF5_VOL_DIR
           mkdir -p $HDF5_VOL_DIR/lib/
           mkdir -p $HDF5_VOL_DIR/include/
           export ABT_DIR=$SDK_DIR/argobots/
+          echo "ABT_DIR=$SDK_DIR/argobots/" >> $GITHUB_ENV
           mkdir -p $ABT_DIR
           # Compile HDF5
           mkdir -p hdf5/build
           cd hdf5/build
-          cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$HDF5_DIR -DHDF5_ENABLE_PARALLEL:BOOL=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DALLOW_UNSUPPORTED:BOOL=ON ..
+          cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=$HDF5_DIR -DHDF5_ENABLE_PARALLEL:BOOL=ON -DHDF5_ENABLE_THREADSAFE:BOOL=ON -DHDF5_ALLOW_UNSUPPORTED:BOOL=ON ..
           make -j2 install
           cd -
           cd argobots
           ./autogen.sh
           ./configure --prefix=$ABT_DIR
           make && make install -j2
@@ -112,19 +118,6 @@ jobs:
           cd vol-async/build
           cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
           make all install -j2
-          cd -
-          # Compile Cache VOL connector
-          mkdir -p build
-          cd build
-          cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
-          make all install -j2
-          cd -
-          # Compile h5bench
-          mkdir -p h5bench/build
-          cd h5bench/build
-          cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I/$HDF5_VOL_DIR/include -L/$HDF5_VOL_DIR/lib -g"
-          make all install VERBOSE=1 -j2
-          cd -
 
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
@@ -147,11 +140,7 @@ jobs:
           #https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
           queries: +security-extended,security-and-quality
 
-<<<<<<< HEAD
       - name: Installation (Cache VOL connector)
         run: |
           # Compile Cache VOL connector
           mkdir -p build
           cd build
           cmake .. -DCMAKE_INSTALL_PREFIX=$HDF5_VOL_DIR -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx
           make all install -j2
           cd ..
           # Compile h5bench
           mkdir -p h5bench/build
           cd h5bench/build
           cmake .. -DCMAKE_C_COMPILER=mpicc -DCMAKE_CXX_COMPILER=mpicxx -DCMAKE_INSTALL_PREFIX=$SDK_DIR/h5bench -DWITH_CACHE_VOL:BOOL=ON -DWITH_ASYNC_VOL:BOOL=ON -DCMAKE_C_FLAGS="-I$HDF5_VOL_DIR/include -I$HDF5_DIR/include -L$HDF5_VOL_DIR/lib -L$HDF5_DIR/lib -g"
           make all install VERBOSE=1 -j2
 
       - name: Test Vol-Cache-Node-Local
-=======
-      - name: Test Vol-Cache-Node-Local
->>>>>>> 957a852 (Create codeql.yml)
         run: |
           ulimit -d unlimited
           ulimit -s unlimited
           mkdir -p SSD
-<<<<<<< HEAD
-          export PATH=$HDF5_VOL_DIR/bin:$PATH
-=======
-          export EXAHDF5_ROOT=$PWD
-          export SDK_DIR=$EXAHDF5_ROOT/soft/
-          export HDF5_ROOT=$SDK_DIR/hdf5
-          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
-          export ABT_DIR=$SDK_DIR/argobots/
           export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
->>>>>>> 957a852 (Create codeql.yml)
           export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
           export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
           export HDF5_CACHE_DEBUG=100
@@ -187,7 +176,7 @@ jobs:
             HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
+            HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
           done
 
       - name: Test Vol-Cache-MEMORY
         run: |
           ulimit -d unlimited
           ulimit -s unlimited
           mkdir -p SSD
-<<<<<<< HEAD
-          export PATH=$HDF5_VOL_DIR/bin:$PATH
-=======
-          export EXAHDF5_ROOT=$PWD
-          export SDK_DIR=$EXAHDF5_ROOT/soft/
-          export HDF5_ROOT=$SDK_DIR/hdf5
-          export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol
-          export ABT_DIR=$SDK_DIR/argobots/
           export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH
->>>>>>> 957a852 (Create codeql.yml)
           export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib
           export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH
           export HDF5_CACHE_DEBUG=100
@@ -215,56 +191,33 @@ jobs:
             HDF5_CACHE_WR=$opt mpirun -np 2 test_dataset_async_api.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 test_group.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 test_file.exe
             HDF5_CACHE_WR=$opt mpirun -np 2 h5bench_write ./tests/test_h5bench.cfg test.h5
           done
-
-      - name: Test Vol-Cache-Global
-        run: |
-          mkdir -p SSD
-          export EXAHDF5_ROOT=$PWD
-          export SDK_DIR=$EXAHDF5_ROOT/soft/
-
export HDF5_ROOT=$SDK_DIR/hdf5 - export HDF5_VOL_DIR=$SDK_DIR/hdf5/vol - export ABT_DIR=$SDK_DIR/argobots/ - export PATH=$EXAHDF5_ROOT/soft/h5bench/bin:$HDF5_VOL_DIR/bin:$PATH - export HDF5_PLUGIN_PATH=$HDF5_VOL_DIR/lib - export LD_LIBRARY_PATH=$HDF5_PLUGIN_PATH:$ABT_DIR/lib:$HDF5_ROOT/lib:$HDF5_VOL_DIR/lib:$LD_LIBRARY_PATH - export HDF5_CACHE_DEBUG=100 - printf "HDF5_CACHE_STORAGE_TYPE: MEMORY\nHDF5_CACHE_STORAGE_PATH: SSD\nHDF5_CACHE_STORAGE_SCOPE: LOCAL\nHDF5_CACHE_STORAGE_SIZE: 128755813888\nHDF5_CACHE_WRITE_BUFFER_SIZE: 2147483648\nHDF5_CACHE_FUSION_THRESHOLD: 16777216" > cache_1.cfg - export LD_PRELOAD=$ABT_DIR/lib/libabt.so - cat cache_1.cfg - HDF5_VOL_CONNECTOR='' prepare_dataset.exe - export HDF5_VOL_CONNECTOR="cache_ext config=cache_1.cfg;under_vol=512;under_info={under_vol=0;under_info={}}" - HDF5_CACHE_WR=yes mpirun -np 2 write_cache.exe --dim 16 16 - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 - with: - category: "/language:c-cpp" + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:c-cpp" + output: sarif-results + upload: failure-only + + - name: filter-sarif + uses: advanced-security/filter-sarif@main + with: + patterns: | + -**/* + src/**/* + input: sarif-results/cpp.sarif + output: sarif-results/cpp.sarif + + - name: Upload SARIF + uses: github/codeql-action/upload-sarif@v3 + with: + sarif_file: sarif-results/cpp.sarif + + - name: Upload loc as a Build Artifact + uses: actions/upload-artifact@v4 + with: + name: sarif-results + path: sarif-results + retention-days: 1 diff --git a/src/H5LS.c b/src/H5LS.c index 6f30cd3..bf5427f 100644 --- a/src/H5LS.c +++ b/src/H5LS.c @@ -144,16 +144,21 @@ herr_t readLSConf(char *fname, cache_storage_t *LS) { linenum++; if (line[0] == '#') continue; - if (sscanf(line, "%[^:]:%s", ip, mac) != 2) { + if (sscanf(line, "%[^:]:%255s", ip, mac) != 2) { if (RANK == io_node()) fprintf(stderr, "Syntax error, line %d\n", linenum); continue; } + if (strlen(ip) >= 256 || strlen(mac) >= 256) { + if (RANK == io_node()) + fprintf(stderr, "Input too long, line %d\n", linenum); + continue; + } if (!strcmp(ip, "HDF5_CACHE_STORAGE_PATH")) if (strcmp(mac, "NULL") == 0) LS->path = NULL; else { - strcpy(LS->path, mac); + snprintf(LS->path, 255, "%s", mac); } else if (!strcmp(ip, "HDF5_CACHE_FUSION_THRESHOLD")) { diff --git a/src/H5VLcache_ext.c b/src/H5VLcache_ext.c index 3178263..3e9868d 100644 --- a/src/H5VLcache_ext.c +++ b/src/H5VLcache_ext.c @@ -44,6 +44,7 @@ #include #include // debug +#define LOG_BUFFER_SIZE 1024 // VOL related header #include "H5LS.h" #include "H5VLcache_ext_private.h" @@ -96,6 +97,7 @@ int RANK = 0; int NPROC = 1; hbool_t HDF5_CACHE_CLOSE_ASYNC = 0; +char log_buffer[LOG_BUFFER_SIZE]; // Functions from async VOL int H5VL_async_set_delay_time(uint64_t time_us); herr_t H5VL_async_set_request_dep(void *request, void *parent_request); @@ -106,7 +108,9 @@ herr_t H5VL_async_start(); #define H5Pcopy(X) \ H5Pcopy(X); \ - LOG_DEBUG(-1, "H5Pcopy called: %s:%d %s\n", __FILE__, __LINE__, __FUNCTION__); + snprintf(log_buffer, LOG_BUFFER_SIZE, "H5Pcopy called: %s:%d %s\n", \ + __FILE__, __LINE__, __FUNCTION__); \ + LOG_DEBUG(-1, "%s", log_buffer); #define H5Scopy(X) \ H5Scopy(X); \ @@ -488,6 +492,7 @@ static herr_t remove_cache(void *obj, void **req) { const H5LS_cache_io_class_t *t = o->H5LS->cache_io_cls; if (o->cache_created == false) { LOG_ERROR(-1, "Cache is not created"); + return FAIL; } o->cache_created = false; if (o->obj_type == H5I_GROUP) @@ -496,12 +501,17 @@ 
static herr_t remove_cache(void *obj, void **req) { return t->remove_file_cache(obj, req); else if (o->obj_type == H5I_DATASET) return t->remove_dataset_cache(obj, req); + else { + LOG_ERROR(-1, "Unknown object type for cache removal"); + return FAIL; + } } static herr_t create_cache(void *obj, void *arg, void **req) { H5VL_cache_ext_t *o = (H5VL_cache_ext_t *)obj; if (o->cache_created) { LOG_ERROR(-1, "Cache is already created"); + return FAIL; } const H5LS_cache_io_class_t *t = o->H5LS->cache_io_cls; o->cache_created = true; @@ -511,6 +521,10 @@ static herr_t create_cache(void *obj, void *arg, void **req) { return t->create_file_cache(obj, arg, req); else if (o->obj_type == H5I_DATASET) return t->create_dataset_cache(obj, arg, req); + else { + LOG_ERROR(-1, "Unknown object type for cache creation"); + return FAIL; + } } /*******************/ /* Local variables */ @@ -770,19 +784,20 @@ static herr_t async_close_task_wait(object_close_task_t *task) { LOG_WARN(-1, "Close request is NULL."); } #ifndef NDEBUG - LOG_DEBUG(-1, "async task finished %d", task->type); + snprintf(log_buffer, LOG_BUFFER_SIZE, "async task finished %d", task->type); + LOG_DEBUG(-1, "%s", log_buffer); double t1 = MPI_Wtime(); - LOG_DEBUG(-1, - "Delay closed object: %d time: " - "%10.6f", - task->type, t1 - t0); + snprintf(log_buffer, LOG_BUFFER_SIZE, "Delay closed object: %d time: %10.6f", + task->type, t1 - t0); + LOG_DEBUG(-1, "%s", log_buffer); #endif if (o->read_cache || o->write_cache) o->H5LS->cache_io_cls->remove_cache(task->obj, NULL); H5VL_cache_ext_free_obj(o); #ifndef NDEBUG double t2 = MPI_Wtime(); - LOG_DEBUG(-1, "Remove cache time: %10.6f", t2 - t1); + snprintf(log_buffer, LOG_BUFFER_SIZE, "Remove cache time: %10.6f", t2 - t1); + LOG_DEBUG(-1, "%s", log_buffer); #endif free(task->req); return 0; @@ -1422,11 +1437,22 @@ static herr_t H5VL_cache_ext_str_to_info(const char *str, void **_info) { LOG_INFO(-1, " storage path: %s", p->H5LS->path); - LOG_INFO(-1, " storage size: %.4f GiB", - p->H5LS->mspace_total / 1024. / 1024. / 1024.); + int ret = + snprintf(log_buffer, LOG_BUFFER_SIZE, " storage size: %.4f GiB", + p->H5LS->mspace_total / 1024. / 1024. / 1024.); + if (ret < 0 || ret >= LOG_BUFFER_SIZE) { + LOG_WARN(-1, "Log Error when formatting storage size message"); + } else { + LOG_INFO(-1, "%s", log_buffer); + } - LOG_INFO(-1, " write buffer size: %.4f GiB", - p->H5LS->write_buffer_size / 1024. / 1024. / 1024.); + ret = snprintf(log_buffer, LOG_BUFFER_SIZE, " write buffer size: %.4f GiB", + p->H5LS->write_buffer_size / 1024. / 1024. / 1024.); + if (ret < 0 || ret >= LOG_BUFFER_SIZE) { + LOG_WARN(-1, "Log Error when formatting write buffer size message"); + } else { + LOG_INFO(-1, "%s", log_buffer); + } LOG_INFO(-1, " storage type: %s", p->H5LS->type); @@ -2539,11 +2565,12 @@ static herr_t free_cache_space_from_dataset(void *dset, hsize_t size) { } H5VL_request_status_t status; #ifndef NDEBUG - LOG_DEBUG(-1, - "request wait(jobid: %d), current available space: " - "%.5f GiB ", - o->H5DWMM->io->current_request->id, - o->H5DWMM->cache->mspace_per_rank_left / 1024. / 1024. / 1024); + snprintf(log_buffer, LOG_BUFFER_SIZE, + "request wait(jobid: %d), current available space: " + "%.5f GiB ", + o->H5DWMM->io->current_request->id, + o->H5DWMM->cache->mspace_per_rank_left / 1024. / 1024. 
/ 1024); + LOG_DEBUG(-1, "%s", log_buffer); #endif while ((o->H5DWMM->io->current_request != NULL && o->H5DWMM->io->current_request->req != NULL)) { @@ -2627,8 +2654,7 @@ static herr_t merge_tasks_in_queue(task_data_t **task_list, int ntasks) { // nearby write requests. t_com->id = r->id; #ifndef NDEBUG - - LOG_DEBUG(-1, "Merging %d tasks (%d - %d) ", ntasks, t_com->id, + LOG_DEBUG(-1, "Merging %d tasks (%d - %d)", ntasks, t_com->id, t_com->id + ntasks - 1); #endif @@ -2657,8 +2683,8 @@ static herr_t merge_tasks_in_queue(task_data_t **task_list, int ntasks) { free(t_com); double t1 = MPI_Wtime(); #ifndef NDEBUG - LOG_DEBUG(-1, "Merging time: %6.5f", t1 - t0); - + snprintf(log_buffer, LOG_BUFFER_SIZE, "Merging time: %6.5f", t1 - t0); + LOG_DEBUG(-1, "%s", log_buffer); #endif return SUCCEED; } @@ -3066,8 +3092,10 @@ static herr_t H5VL_cache_ext_dataset_wait(void *dset) { } double t1 = MPI_Wtime(); #ifndef NDEBUG - LOG_DEBUG(-1, "H5VLreqeust_wait time (jobid: %d): %f", - o->H5DWMM->io->current_request->id, t1 - t0); + snprintf(log_buffer, LOG_BUFFER_SIZE, + "H5VLreqeust_wait time (jobid: %d): %g", + o->H5DWMM->io->current_request->id, t1 - t0); + LOG_DEBUG(-1, "%s", log_buffer); LOG_DEBUG(-1, "Tasks %d(%ld merged) finished", o->H5DWMM->io->current_request->id, @@ -3218,11 +3246,11 @@ static herr_t H5VL_cache_ext_dataset_close(void *dset, hid_t dxpl_id, p->async_close_task_list->obj = NULL; double t1 = MPI_Wtime(); #ifndef NDEBUG - - LOG_DEBUG(-1, - "dataset close time: " - "%.6f seconds", - t1 - t0); + snprintf(log_buffer, LOG_BUFFER_SIZE, + "dataset close time: " + "%.6f seconds", + t1 - t0); + LOG_DEBUG(-1, "%s", log_buffer); #endif return ret_value; @@ -3234,10 +3262,11 @@ static herr_t H5VL_cache_ext_dataset_close(void *dset, hid_t dxpl_id, double t1 = MPI_Wtime(); #ifndef NDEBUG - LOG_DEBUG(-1, - "dataset remove cache time (including wait time): " - "%.6f seconds", - t1 - t0); + snprintf(log_buffer, LOG_BUFFER_SIZE, + "dataset remove cache time (including wait time): " + "%.6f seconds", + t1 - t0); + LOG_DEBUG(-1, "%s", log_buffer); #endif } @@ -3259,7 +3288,9 @@ static herr_t H5VL_cache_ext_dataset_close(void *dset, hid_t dxpl_id, H5VL_cache_ext_free_obj(o); double tt1 = MPI_Wtime(); #ifndef NDEBUG - LOG_DEBUG(-1, "H5VL_cache_ext_dataset_close time: %.6f seconds", tt1 - tt0); + snprintf(log_buffer, LOG_BUFFER_SIZE, + "H5VL_cache_ext_dataset_close time: %.6f seconds", tt1 - tt0); + LOG_DEBUG(-1, "%s", log_buffer); #endif return ret_value; @@ -5649,15 +5680,30 @@ static herr_t create_dataset_cache_on_local_storage(void *obj, void *dset_args, if (dset->H5LS->path != NULL) { strcpy(dset->H5DRMM->cache->path, p->H5DRMM->cache->path); // create - strcat(dset->H5DRMM->cache->path, "/"); - strcat(dset->H5DRMM->cache->path, name); - strcat(dset->H5DRMM->cache->path, "/"); - strcpy(dset->H5DRMM->mmap->fname, dset->H5DRMM->cache->path); - strcat(dset->H5DRMM->mmap->fname, "/dset-mmap-"); + strncat(dset->H5DRMM->cache->path, "/", + sizeof(dset->H5DRMM->cache->path) - + strlen(dset->H5DRMM->cache->path) - 1); + strncat(dset->H5DRMM->cache->path, name, + sizeof(dset->H5DRMM->cache->path) - + strlen(dset->H5DRMM->cache->path) - 1); + strncat(dset->H5DRMM->cache->path, "/", + sizeof(dset->H5DRMM->cache->path) - + strlen(dset->H5DRMM->cache->path) - 1); + strncpy(dset->H5DRMM->mmap->fname, dset->H5DRMM->cache->path, + sizeof(dset->H5DRMM->mmap->fname) - 1); + dset->H5DRMM->mmap->fname[sizeof(dset->H5DRMM->mmap->fname) - 1] = + '\0'; // Ensure null-termination + strncat(dset->H5DRMM->mmap->fname, 
"/dset-mmap-", + sizeof(dset->H5DRMM->mmap->fname) - + strlen(dset->H5DRMM->mmap->fname) - 1); char cc[255]; int2char(dset->H5DRMM->mpi->rank, cc); - strcat(dset->H5DRMM->mmap->fname, cc); - strcat(dset->H5DRMM->mmap->fname, ".dat"); + strncat(dset->H5DRMM->mmap->fname, cc, + sizeof(dset->H5DRMM->mmap->fname) - + strlen(dset->H5DRMM->mmap->fname) - 1); + strncat(dset->H5DRMM->mmap->fname, ".dat", + sizeof(dset->H5DRMM->mmap->fname) - + strlen(dset->H5DRMM->mmap->fname) - 1); #ifndef NDEBUG LOG_DEBUG(-1, "Dataset read cache created: %s", @@ -5735,9 +5781,15 @@ static herr_t create_group_cache_on_local_storage(void *obj, void *group_args, memcpy(group->H5DRMM->mpi, o->H5DRMM->mpi, sizeof(MPI_INFO)); if (group->H5LS->path != NULL) { strcpy(group->H5DRMM->cache->path, o->H5DRMM->cache->path); // create - strcat(group->H5DRMM->cache->path, "/"); - strcat(group->H5DRMM->cache->path, name); - strcat(group->H5DRMM->cache->path, "/"); + size_t remaining_size = sizeof(group->H5DRMM->cache->path) - + strlen(group->H5DRMM->cache->path) - 1; + strncat(group->H5DRMM->cache->path, "/", remaining_size); + remaining_size = sizeof(group->H5DRMM->cache->path) - + strlen(group->H5DRMM->cache->path) - 1; + strncat(group->H5DRMM->cache->path, name, remaining_size); + remaining_size = sizeof(group->H5DRMM->cache->path) - + strlen(group->H5DRMM->cache->path) - 1; + strncat(group->H5DRMM->cache->path, "/", remaining_size); #ifndef NDEBUG LOG_DEBUG(-1, "group cache created: %s", group->H5DRMM->cache->path); #endif @@ -5790,7 +5842,8 @@ static herr_t remove_dataset_cache_on_local_storage(void *dset, void **req) { H5VL_cache_ext_dataset_wait(dset); double t1 = MPI_Wtime(); #ifndef NDEBUG - LOG_DEBUG(-1, "dataset_wait time: %f", t1 - t0); + snprintf(log_buffer, LOG_BUFFER_SIZE, "dataset_wait time: %f", t1 - t0); + LOG_DEBUG(-1, "%s", log_buffer); #endif o->H5DWMM = NULL; } From 68cffbe1e6ffc69cd25ba5966e578ba5ff3f05bb Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Thu, 12 Dec 2024 18:04:37 -0600 Subject: [PATCH 25/47] Fix2 (#2) * fixed codeQL issues * Committing clang-format changes --------- Co-authored-by: github-actions --- src/H5LS.c | 19 +++++++++++++++---- src/H5LS_SSD.c | 4 +--- src/H5VLcache_ext.c | 40 ++++++++++++++++++++++------------------ 3 files changed, 38 insertions(+), 25 deletions(-) diff --git a/src/H5LS.c b/src/H5LS.c index bf5427f..2adaef5 100644 --- a/src/H5LS.c +++ b/src/H5LS.c @@ -97,7 +97,15 @@ cache_replacement_policy_t get_replacement_policy_from_str(char *str) { else if (!strcmp(str, "LIFO")) return LIFO; else { - LOG_ERROR(-1, "unknown cache replacement type: %s\n", str); + char error_msg[256]; + if (strlen(str) < 200) { + snprintf(error_msg, sizeof(error_msg), + "unknown cache replacement type: %s\n", str); + } else { + snprintf(error_msg, sizeof(error_msg), + "unknown cache replacement type: string too long to display\n"); + } + LOG_ERROR(-1, "%s", error_msg); return FAIL; } } @@ -132,7 +140,8 @@ herr_t readLSConf(char *fname, cache_storage_t *LS) { } FILE *file = fopen(fname, "r"); LS->path = (char *)malloc(255); - strcpy(LS->path, "./"); + strncpy(LS->path, "./", 254); + LS->path[254] = '\0'; LS->mspace_total = 137438953472; strcpy(LS->type, "SSD"); strcpy(LS->scope, "LOCAL"); @@ -171,9 +180,11 @@ herr_t readLSConf(char *fname, cache_storage_t *LS) { else if (!strcmp(ip, "HDF5_CACHE_WRITE_BUFFER_SIZE")) LS->write_buffer_size = (hsize_t)atof(mac); else if (!strcmp(ip, "HDF5_CACHE_STORAGE_TYPE")) { - strcpy(LS->type, mac); + strncpy(LS->type, mac, sizeof(LS->type) - 1); + 
LS->type[sizeof(LS->type) - 1] = '\0'; } else if (!strcmp(ip, "HDF5_CACHE_STORAGE_SCOPE")) { - strcpy(LS->scope, mac); + strncpy(LS->scope, mac, sizeof(LS->scope) - 1); + LS->scope[sizeof(LS->scope) - 1] = '\0'; } else if (!strcmp(ip, "HDF5_CACHE_REPLACEMENT_POLICY")) { if (get_replacement_policy_from_str(mac) > 0) LS->replacement_policy = get_replacement_policy_from_str(mac); diff --git a/src/H5LS_SSD.c b/src/H5LS_SSD.c index 4105e71..01f09e8 100644 --- a/src/H5LS_SSD.c +++ b/src/H5LS_SSD.c @@ -42,7 +42,7 @@ static herr_t H5Ssel_gather_write(hid_t space, hid_t tid, const void *buf, char *p = (char *)buf; int i; for (i = 0; i < nseq; i++) { - int err = pwrite(fd, &p[off[i]], len[i], offset + off_contig); + pwrite(fd, &p[off[i]], len[i], offset + off_contig); off_contig += len[i]; } #ifdef __APPLE__ @@ -58,7 +58,6 @@ static herr_t H5LS_SSD_create_write_mmap(MMAP *mm, hsize_t size) { strcpy(dname, mm->fname); mkdirRecursive(dirname(dname), 0755); // dirname will change dname in linux. // therefore, we make copy first. - struct stat info; mm->fd = open(mm->fname, O_RDWR | O_CREAT | O_TRUNC, 0644); return 0; } @@ -98,7 +97,6 @@ static herr_t H5LS_SSD_create_read_mmap(MMAP *mm, hsize_t size) { /* clean up read mmap buffer, files */ static herr_t H5LS_SSD_remove_read_mmap(MMAP *mm, hsize_t size) { - herr_t ret; munmap(mm->buf, size); close(mm->fd); if (access(mm->fname, F_OK) == 0) diff --git a/src/H5VLcache_ext.c b/src/H5VLcache_ext.c index 3e9868d..2ba981e 100644 --- a/src/H5VLcache_ext.c +++ b/src/H5VLcache_ext.c @@ -19,6 +19,7 @@ #include #include +#include #include #include #include @@ -955,7 +956,7 @@ static herr_t H5VL_cache_ext_init(hid_t vipl_id) { int called = 0; MPI_Initialized(&called); if (called == 1) { - int provided = 0; + provided = 0; MPI_Query_thread(&provided); MPI_Comm_size(MPI_COMM_WORLD, &NPROC); MPI_Comm_rank(MPI_COMM_WORLD, &RANK); @@ -967,7 +968,7 @@ static herr_t H5VL_cache_ext_init(hid_t vipl_id) { MPI_Abort(MPI_COMM_WORLD, 1); } } else { - int provided = 0; + provided = 0; MPI_Init_thread(NULL, NULL, MPI_THREAD_MULTIPLE, &provided); MPI_Comm_size(MPI_COMM_WORLD, &NPROC); MPI_Comm_rank(MPI_COMM_WORLD, &RANK); @@ -1315,11 +1316,18 @@ static herr_t native_vol_info(void **_info) { H5VL_cache_ext_info_t *info; unsigned under_vol_value; const char *under_vol_info_start, *under_vol_info_end; - hid_t under_vol_id; + hid_t under_vol_id = H5I_INVALID_HID; void *under_vol_info = NULL; /* Retrieve the underlying VOL connector value and info */ - sscanf(str, "under_vol=%u;", &under_vol_value); + if (sscanf(str, "under_vol=%u;", &under_vol_value) != 1) { + LOG_ERROR( + -1, + "Failed to parse under_vol value; make sure you have" + " 'config=...;under_vol=...' in your HDF5_VOL_CONNECTOR " + "setup"); + MPI_Abort(MPI_COMM_WORLD, 1); + } under_vol_id = H5VLregister_connector_by_value( (H5VL_class_value_t)under_vol_value, H5P_DEFAULT); @@ -1395,7 +1403,14 @@ static herr_t H5VL_cache_ext_str_to_info(const char *str, void **_info) { "setup"); MPI_Abort(MPI_COMM_WORLD, 1); } - sscanf(lasts, "under_vol=%u;", &under_vol_value); + if (sscanf(lasts, "under_vol=%u;", &under_vol_value) != 1) { + LOG_ERROR( + -1, + "Failed to parse under_vol value; make sure you have" + " 'config=...;under_vol=...' 
in your HDF5_VOL_CONNECTOR " + "setup"); + MPI_Abort(MPI_COMM_WORLD, 1); + } under_vol_id = H5VLregister_connector_by_value( (H5VL_class_value_t)under_vol_value, H5P_DEFAULT); under_vol_info_start = strchr(lasts, '{'); @@ -2038,7 +2053,6 @@ static herr_t H5VL_cache_ext_dataset_mmap_remap(void *obj) { hsize_t ss = round_page(dset->H5DRMM->dset.size); if (strcmp(dset->H5LS->type, "MEMORY") != 0) { // msync(dset->H5DRMM->mmap->buf, ss, MS_SYNC); - double t0 = MPI_Wtime(); munmap(dset->H5DRMM->mmap->buf, ss); #ifdef __linux__ posix_fadvise(dset->H5DRMM->mmap->fd, 0, ss, POSIX_FADV_DONTNEED); @@ -5388,8 +5402,6 @@ static herr_t create_file_cache_on_local_storage(void *obj, void *file_args, file_args_t *args = (file_args_t *)file_args; const char *name = args->name; - herr_t ret_value; - hsize_t size_f; H5VL_cache_ext_t *file = (H5VL_cache_ext_t *)obj; H5VL_cache_ext_info_t *info; @@ -5545,7 +5557,6 @@ static herr_t create_file_cache_on_local_storage(void *obj, void *file_args, static herr_t remove_file_cache_on_local_storage(void *file, void **req) { H5VL_cache_ext_t *o = (H5VL_cache_ext_t *)file; - herr_t ret_value; if (o->write_cache) { H5VL_cache_ext_file_wait(file); o->H5LS->mmap_cls->remove_write_mmap(o->H5DWMM->mmap, 0); @@ -5597,7 +5608,6 @@ static herr_t create_dataset_cache_on_local_storage(void *obj, void *dset_args, #endif dset_args_t *args = (dset_args_t *)dset_args; const char *name = args->name; - herr_t ret_value; H5VL_cache_ext_t *dset = (H5VL_cache_ext_t *)obj; H5VL_cache_ext_t *o = (H5VL_cache_ext_t *)dset->parent; H5VL_cache_ext_t *p = o; @@ -5771,7 +5781,6 @@ static herr_t create_group_cache_on_local_storage(void *obj, void *group_args, #endif group_args_t *args = (group_args_t *)group_args; const char *name = args->name; - herr_t ret_value; H5VL_cache_ext_t *group = (H5VL_cache_ext_t *)obj; H5VL_cache_ext_t *o = (H5VL_cache_ext_t *)group->parent; if (group->read_cache) { @@ -6092,8 +6101,6 @@ static herr_t create_file_cache_on_global_storage(void *obj, void *file_args, LOG_INFO(-1, "VOL File cache create "); #endif file_args_t *args = (file_args_t *)file_args; - herr_t ret_value; - hsize_t size_f; H5VL_cache_ext_t *file = (H5VL_cache_ext_t *)obj; // hid_t fapl_id = args->fapl_id; @@ -6224,7 +6231,6 @@ static herr_t create_dataset_cache_on_global_storage(void *obj, void *dset_args, // set up read cache: obj, dset object // loc - where is the dataset located - group or file object dset_args_t *args = (dset_args_t *)dset_args; - herr_t ret_value; H5VL_cache_ext_t *dset = (H5VL_cache_ext_t *)obj; H5VL_cache_ext_t *o = (H5VL_cache_ext_t *)dset->parent; while (o->parent != NULL) @@ -6236,7 +6242,6 @@ static herr_t create_dataset_cache_on_global_storage(void *obj, void *dset_args, dset->H5DWMM->mpi = o->H5DWMM->mpi; dset->H5DWMM->mmap = (MMAP *)malloc(sizeof(MMAP)); dset->H5DWMM->io = o->H5DWMM->io; - hsize_t size_f; char fname[255]; file_get_name(o->under_object, o->under_vol_id, sizeof(fname), fname, @@ -6425,7 +6430,8 @@ static herr_t flush_data_from_global_storage(void *current_request, void *req2 = NULL; hid_t dxpl_id = H5Pcopy(task->xfer_plist_id); if (getenv("HDF5_ASYNC_DELAY_TIME")) { - int delay_time = atof(getenv("HDF5_ASYNC_DELAY_TIME")); + double delay_time_double = atof(getenv("HDF5_ASYNC_DELAY_TIME")); + int delay_time = (int)round(delay_time_double); // H5Pset_dxpl_delay(task->xfer_plist_id, delay_time); H5Pset_dxpl_delay(dxpl_id, delay_time); } @@ -6476,7 +6482,6 @@ static herr_t flush_data_from_global_storage(void *current_request, // H5VL_async_start(); if 
(getenv("HDF5_ASYNC_DELAY_TIME")) H5Pset_dxpl_delay(dxpl_id, 0); - H5VL_request_status_t status; o->H5LS->previous_write_req = task->req; // building next task #ifndef NDEBUG @@ -6520,7 +6525,6 @@ static herr_t remove_dataset_cache_on_global_storage(void *dset, void **req) { static herr_t remove_file_cache_on_global_storage(void *file, void **req) { H5VL_cache_ext_t *o = (H5VL_cache_ext_t *)file; - herr_t ret_value; if (o->write_cache) { H5VL_cache_ext_file_wait(file); H5Fclose(o->hd_glob); From 4bcc06b363953ac767f7faf85708fea7216cbb96 Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Thu, 12 Dec 2024 20:09:31 -0600 Subject: [PATCH 26/47] Fix2 (#3) * fixed codeQL issues * Committing clang-format changes * codeQL fixes * Committing clang-format changes * syntax * Fix code scanning alert no. 292: Unbounded write Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com> * Committing clang-format changes * updates * Committing clang-format changes --------- Co-authored-by: github-actions Co-authored-by: Copilot Autofix powered by AI <62310815+github-advanced-security[bot]@users.noreply.github.com> --- src/H5LS.c | 36 +++++++++++++++++++++++------------- src/H5VLcache_ext.c | 15 +++------------ 2 files changed, 26 insertions(+), 25 deletions(-) diff --git a/src/H5LS.c b/src/H5LS.c index 2adaef5..4cd39c9 100644 --- a/src/H5LS.c +++ b/src/H5LS.c @@ -59,6 +59,9 @@ extern int RANK; extern int NPROC; +#define ERROR_MSG_SIZE 256 +char error_msg[ERROR_MSG_SIZE]; + /* Get the corresponding mmap function struct based on the type of node local storage The user can modify this function to other storage @@ -75,10 +78,14 @@ const H5LS_mmap_class_t *get_H5LS_mmap_class_t(char *type) { p = &H5LS_GPU_mmap_ext_g; #endif } else { - LOG_ERROR(-1, - "I don't know the type of storage: %s\n" - "Supported options: SSD|BURST_BUFFER|MEMORY|GPU\n", - type); + char truncated_type[128]; + strncpy(truncated_type, type, sizeof(truncated_type) - 1); + truncated_type[sizeof(truncated_type) - 1] = '\0'; + snprintf(error_msg, ERROR_MSG_SIZE, + "I don't know the type of storage: %s\n" + "Supported options: SSD|BURST_BUFFER|MEMORY|GPU\n", + truncated_type); + LOG_ERROR(-1, "%s", error_msg); MPI_Abort(MPI_COMM_WORLD, 111); } return p; @@ -97,12 +104,11 @@ cache_replacement_policy_t get_replacement_policy_from_str(char *str) { else if (!strcmp(str, "LIFO")) return LIFO; else { - char error_msg[256]; if (strlen(str) < 200) { - snprintf(error_msg, sizeof(error_msg), + snprintf(error_msg, ERROR_MSG_SIZE, "unknown cache replacement type: %s\n", str); } else { - snprintf(error_msg, sizeof(error_msg), + snprintf(error_msg, ERROR_MSG_SIZE, "unknown cache replacement type: string too long to display\n"); } LOG_ERROR(-1, "%s", error_msg); @@ -139,9 +145,9 @@ herr_t readLSConf(char *fname, cache_storage_t *LS) { MPI_Abort(MPI_COMM_WORLD, 100); } FILE *file = fopen(fname, "r"); - LS->path = (char *)malloc(255); - strncpy(LS->path, "./", 254); - LS->path[254] = '\0'; + LS->path = (char *)malloc(256); + strncpy(LS->path, "./", 255); + LS->path[255] = '\0'; LS->mspace_total = 137438953472; strcpy(LS->type, "SSD"); strcpy(LS->scope, "LOCAL"); @@ -153,11 +159,13 @@ herr_t readLSConf(char *fname, cache_storage_t *LS) { linenum++; if (line[0] == '#') continue; - if (sscanf(line, "%[^:]:%255s", ip, mac) != 2) { + if (sscanf(line, "%255[^:]:%255s", ip, mac) != 2) { if (RANK == io_node()) fprintf(stderr, "Syntax error, line %d\n", linenum); continue; } + ip[255] = '\0'; + mac[255] = '\0'; if 
(strlen(ip) >= 256 || strlen(mac) >= 256) { if (RANK == io_node()) fprintf(stderr, "Input too long, line %d\n", linenum); @@ -167,7 +175,7 @@ herr_t readLSConf(char *fname, cache_storage_t *LS) { if (strcmp(mac, "NULL") == 0) LS->path = NULL; else { - snprintf(LS->path, 255, "%s", mac); + snprintf(LS->path, 256, "%s", mac); } else if (!strcmp(ip, "HDF5_CACHE_FUSION_THRESHOLD")) { @@ -189,7 +197,9 @@ herr_t readLSConf(char *fname, cache_storage_t *LS) { if (get_replacement_policy_from_str(mac) > 0) LS->replacement_policy = get_replacement_policy_from_str(mac); } else { - LOG_WARN(-1, "Unknown configuration setup:", ip); + snprintf(error_msg, ERROR_MSG_SIZE, "Unknown configuration setup: %s", + ip); + LOG_WARN(-1, "%s", error_msg); } } if (LS->mspace_total < LS->write_buffer_size) { diff --git a/src/H5VLcache_ext.c b/src/H5VLcache_ext.c index 2ba981e..1493d20 100644 --- a/src/H5VLcache_ext.c +++ b/src/H5VLcache_ext.c @@ -1961,7 +1961,7 @@ static hid_t dataset_get_dapl(void *dset, hid_t driver_id, hid_t dxpl_id, /* So far this does not work */ static hid_t group_get_gapl(void *group, hid_t driver_id, hid_t dxpl_id, void **req) { - H5VL_dataset_get_args_t vol_cb_args; + // H5VL_dataset_get_args_t vol_cb_args; #ifndef NDEBUG LOG_WARN(-1, "Getting gapl from the group object " " is not implemented yet, returning H5P_DEFAULT"); @@ -2060,7 +2060,6 @@ static herr_t H5VL_cache_ext_dataset_mmap_remap(void *obj) { fsync(dset->H5DRMM->mmap->fd); close(dset->H5DRMM->mmap->fd); MPI_Win_free(&dset->H5DRMM->mpi->win); - double t1 = MPI_Wtime(); char tmp[252]; strcpy(tmp, dset->H5DRMM->mmap->fname); @@ -2080,7 +2079,6 @@ static herr_t H5VL_cache_ext_dataset_mmap_remap(void *obj) { MPI_Win_create(dset->H5DRMM->mmap->buf, ss, dset->H5DRMM->dset.esize, MPI_INFO_NULL, dset->H5DRMM->mpi->comm, &dset->H5DRMM->mpi->win); - double t2 = MPI_Wtime(); } return SUCCEED; } @@ -2351,7 +2349,6 @@ static herr_t H5VL_cache_ext_dataset_prefetch(void *obj, hid_t fspace, H5Sclose(fs_cpy); } if (ret_value == 0) { - hsize_t ss = round_page(dset->H5DRMM->dset.size); if (dset->H5LS->path != NULL) msync(dset->H5DRMM->mmap->buf, dset->H5DRMM->dset.size, MS_SYNC); dset->H5DRMM->io->dset_cached = true; @@ -3074,7 +3071,6 @@ static herr_t H5VL_cache_ext_dataset_wait(void *dset) { } if (o->write_cache) { - double available = o->H5DWMM->cache->mspace_per_rank_left; H5VL_request_status_t status; while ((o->num_request_dataset > 0) && (o->H5DWMM->io->current_request != NULL && @@ -3156,7 +3152,6 @@ static herr_t H5VL_cache_ext_file_wait(void *file) { o->H5DWMM->io->fusion_data_size = 0.0; o->H5DWMM->io->flush_request = o->H5DWMM->io->flush_request->next; } - double available = o->H5DWMM->cache->mspace_per_rank_left; H5VL_request_status_t status; while ((o->H5DWMM->io->current_request != NULL) && (o->H5DWMM->io->num_request > 0)) { @@ -3236,13 +3231,11 @@ static herr_t H5VL_cache_ext_dataset_close(void *dset, hid_t dxpl_id, // printf("task-current-type: %d\n", p->async_close_task_current->type); if (p->async_pause) H5Pset_dxpl_pause(dxpl_id, p->async_pause); - double tt0 = MPI_Wtime(); ret_value = H5VLdataset_close(o->under_object, o->under_vol_id, dxpl_id, &p->async_close_task_list->req); H5Pset_dxpl_pause(dxpl_id, false); // assert(p->async_close_task_list->req!=NULL); - double tt1 = MPI_Wtime(); /* if (write_req !=NULL) { printf(" set dependenace...."); @@ -5430,7 +5423,7 @@ static herr_t create_file_cache_on_local_storage(void *obj, void *file_args, #endif // getting mpi info - MPI_Comm comm, comm_dup; + MPI_Comm comm; MPI_Info mpi_info; 
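The change just above drops the unused comm_dup declaration that CodeQL flagged; the call that follows still recovers the communicator from the file access property list and duplicates it, so the connector's internal collectives run on a private communicator instead of the application's. A reduced sketch of that pattern, with a hypothetical dup_fapl_comm helper and error handling collapsed to return codes (H5Pget_fapl_mpio exists only in parallel HDF5 builds, which the workflow's -DHDF5_ENABLE_PARALLEL:BOOL=ON ensures here):

#include <hdf5.h>
#include <mpi.h>

/* Hypothetical sketch: fetch the communicator from a parallel fapl and
 * keep a private duplicate for the cache VOL's collective operations. */
static int dup_fapl_comm(hid_t fapl_id, MPI_Comm *out) {
  MPI_Comm comm;
  MPI_Info info;
  if (H5Pget_fapl_mpio(fapl_id, &comm, &info) < 0)
    return -1;
  if (MPI_Comm_dup(comm, out) != MPI_SUCCESS)
    return -1;
  return 0;
}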
H5Pget_fapl_mpio(args->fapl_id, &comm, &mpi_info); MPI_Comm_dup(comm, &file->H5DWMM->mpi->comm); @@ -5888,7 +5881,6 @@ static void *write_data_to_local_storage2(void *dset, hid_t mem_type_id, #ifndef NDEBUG LOG_INFO(-1, "caching data to local storage using MPI_Put"); #endif - hsize_t bytes = get_buf_size(mem_space_id, mem_type_id); get_samples_from_filespace(file_space_id, &o->H5DRMM->dset.batch, &o->H5DRMM->dset.contig_read); o->H5DRMM->mmap->tmp_buf = (void *)buf; @@ -6070,7 +6062,6 @@ static herr_t flush_data_from_local_storage(void *current_request, void **req) { } // for (size_t i = 0; i < count; i++) ((H5VL_cache_ext_t *)task->dataset_obj[0])->num_request_dataset++; - H5VL_request_status_t status; o->H5DWMM->io->num_request++; // building next task #ifndef NDEBUG @@ -6124,7 +6115,7 @@ static herr_t create_file_cache_on_global_storage(void *obj, void *file_args, "Remove first!"); return FAIL; } - MPI_Comm comm, comm_dup; + MPI_Comm comm; MPI_Info mpi_info; H5Pget_fapl_mpio(args->fapl_id, &comm, &mpi_info); From ca0469a45ff377619d618515a7c29274bd32245e Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Thu, 12 Dec 2024 23:22:12 -0600 Subject: [PATCH 27/47] fix10 --- src/H5VLcache_ext.c | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/src/H5VLcache_ext.c b/src/H5VLcache_ext.c index 1493d20..53f49af 100644 --- a/src/H5VLcache_ext.c +++ b/src/H5VLcache_ext.c @@ -951,7 +951,6 @@ hid_t H5VL_cache_ext_register(void) { *------------------------------------------------------------------------- */ static herr_t H5VL_cache_ext_init(hid_t vipl_id) { - int rank; int provided; int called = 0; MPI_Initialized(&called); @@ -2155,7 +2154,7 @@ static herr_t H5VL_cache_ext_dataset_prefetch_async(void *obj, hid_t fspace, hsize_t offset = round_page(dset->H5DRMM->dset.sample.size * nblock * nsample_per_block); // We only assume prefetching on dataset, not multiple. 
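The prefetch hunk continuing below comments out ptr, a pointer computed from a page-rounded offset but never read afterwards, the dead-store pattern CodeQL reports (the offset computation itself is commented out in the following patch). The page rounding is still worth recording: offsets and sizes handed to the mmap-backed cache are aligned to the system page size via round_page. The connector defines round_page elsewhere in the tree; the definition below is a typical sketch of such a helper, not a copy of it.

#include <stddef.h>
#include <unistd.h>

/* Typical page-alignment sketch: round n up to a multiple of the
 * system page size before using it as an mmap offset or length. */
static size_t round_page_sketch(size_t n) {
  size_t ps = (size_t)sysconf(_SC_PAGESIZE);
  return (n + ps - 1) / ps * ps;
}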
- void *ptr = &p[offset]; + // void *ptr = &p[offset]; ret_value = H5VLdataset_read(1, &dset->under_object, dset->under_vol_id, &dset->H5DRMM->dset.h5_datatype, &mspace, &fs_cpy, plist_id, (void **)&p, &r->req); @@ -2506,12 +2505,12 @@ static herr_t H5VL_cache_ext_dataset_read(size_t count, void *dset[], ret_value = H5VLdataset_read(count, obj, o->under_vol_id, mem_type_id, mem_space_id, file_space_id, plist_id, buf, req); - for (size_t i = 0; i < count; i++) + for (i = 0; i < count; i++) o->H5LS->cache_io_cls->write_data_to_cache2( dset[i], mem_type_id[i], mem_space_id[i], file_space_id[i], plist_id, buf[i], req); } else { - for (size_t i = 0; i < count; i++) + for (i = 0; i < count; i++) ret_value = o->H5LS->cache_io_cls->read_data_from_cache( dset[i], mem_type_id[i], mem_space_id[i], file_space_id[i], plist_id, buf[i], req); @@ -3129,7 +3128,8 @@ static herr_t H5VL_cache_ext_dataset_wait(void *dset) { H5ESclose(o->es_id); double t1 = MPI_Wtime(); #ifndef NDEBUG - LOG_DEBUG(-1, "ESwait time: %.5f seconds", t1 - t0); + snprintf(log_buffer, LOG_BUFFER_SIZE, "ESwait time: %.5f seconds", t1 - t0); + LOG_DEBUG(-1, "%s", log_buffer); #endif } return 0; @@ -3283,7 +3283,8 @@ static herr_t H5VL_cache_ext_dataset_close(void *dset, hid_t dxpl_id, double t1 = MPI_Wtime(); #ifndef NDEBUG - LOG_DEBUG(-1, "H5VLdataset_close time: %f", t1 - t0); + snprintf(log_buffer, LOG_BUFFER_SIZE, "H5VLdataset_close time: %f", t1 - t0); + LOG_DEBUG(-1, "%s", log_buffer); #endif /* Check for async request */ @@ -4037,19 +4038,19 @@ static herr_t H5VL_cache_ext_file_optional(void *file, } if (o->async_close && o->async_pause) { - object_close_task_t *p = + object_close_task_t *p1 = (object_close_task_t *)o->async_close_task_current; #ifndef NDEBUG LOG_INFO(-1, "starting async close task"); #endif int n = 0; - while (p != NULL && p->req != NULL) { + while (p1 != NULL && p1->req != NULL) { #ifndef NDEBUG - LOG_DEBUG(-1, "starting async close task: %d, %d", n, p->type); + LOG_DEBUG(-1, "starting async close task: %d, %d", n, p1->type); #endif - H5async_start(p->req); - p = p->next; + H5async_start(p1->req); + p1 = p1->next; n++; } } @@ -6412,7 +6413,7 @@ static herr_t flush_data_from_global_storage(void *current_request, // question: How to combine these two calls and make them dependent from each // other hsize_t bytes; - for (size_t i = 0; i < count; i++) { + for (i = 0; i < count; i++) { bytes = get_buf_size(task->mem_space_id[i], task->mem_type_id[i]); task->buf[i] = malloc(bytes); } From 430f10c8bb5911a2a7b48f4682d81fa8469334b2 Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Thu, 12 Dec 2024 23:40:58 -0600 Subject: [PATCH 28/47] fix11 --- src/H5LS.c | 10 ++++++---- src/H5VLcache_ext.c | 4 ++-- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/src/H5LS.c b/src/H5LS.c index 4cd39c9..b9ddec2 100644 --- a/src/H5LS.c +++ b/src/H5LS.c @@ -409,9 +409,10 @@ herr_t H5LSclaim_space(cache_storage_t *LS, hsize_t size, cache_claim_t type, if (LS->mspace_left > size) { LS->mspace_left = LS->mspace_left - size; #ifndef NDEBUG - LOG_DEBUG(-1, "Claimed: %.4f GiB\n", size / 1024. / 1024. / 1024.); - LOG_DEBUG(-1, "LS->space left: %.4f GiB\n", - LS->mspace_left / 1024. / 1024 / 1024.); + snprintf(error_msg, ERROR_MSG_SIZE, "Claimed: %.4f GiB\n", size / 1024. / 1024. / 1024.); + LOG_DEBUG(-1, "%s", error_msg); + snprintf(error_msg, ERROR_MSG_SIZE, "LS->space left: %.4f GiB\n", LS->mspace_left / 1024. 
/ 1024 / 1024.); + LOG_DEBUG(-1, "%s", error_msg); #endif return SUCCEED; } else { @@ -432,7 +433,8 @@ herr_t H5LSclaim_space(cache_storage_t *LS, hsize_t size, cache_claim_t type, stay = tmp; if (mspace < size) { #ifndef NDEBUG - LOG_DEBUG(-1, "mspace (bytes): %f - %lu\n", mspace, size); + snprintf(error_msg, ERROR_MSG_SIZE, "mspace (bytes): %f - %lu\n", mspace, size); + LOG_DEBUG(-1, "%s", error_msg); #endif return FAIL; } else { diff --git a/src/H5VLcache_ext.c b/src/H5VLcache_ext.c index 53f49af..553278e 100644 --- a/src/H5VLcache_ext.c +++ b/src/H5VLcache_ext.c @@ -2151,8 +2151,8 @@ static herr_t H5VL_cache_ext_dataset_prefetch_async(void *obj, hid_t fspace, H5Sget_simple_extent_dims(fs_cpy, ldims, NULL); ldims[0] = dset->H5DRMM->dset.ns_loc % nsample_per_block; hid_t mspace = H5Screate_simple(ndims, ldims, NULL); - hsize_t offset = round_page(dset->H5DRMM->dset.sample.size * nblock * - nsample_per_block); + //hsize_t offset = round_page(dset->H5DRMM->dset.sample.size * nblock * + // nsample_per_block); // We only assume prefetching on dataset, not multiple. // void *ptr = &p[offset]; ret_value = H5VLdataset_read(1, &dset->under_object, dset->under_vol_id, From b55750f54663abd37a3c00aa54a508d2c9be8a1a Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 13 Dec 2024 05:41:20 +0000 Subject: [PATCH 29/47] Committing clang-format changes --- src/H5LS.c | 9 ++++++--- src/H5VLcache_ext.c | 2 +- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/H5LS.c b/src/H5LS.c index b9ddec2..8ac46c0 100644 --- a/src/H5LS.c +++ b/src/H5LS.c @@ -409,9 +409,11 @@ herr_t H5LSclaim_space(cache_storage_t *LS, hsize_t size, cache_claim_t type, if (LS->mspace_left > size) { LS->mspace_left = LS->mspace_left - size; #ifndef NDEBUG - snprintf(error_msg, ERROR_MSG_SIZE, "Claimed: %.4f GiB\n", size / 1024. / 1024. / 1024.); + snprintf(error_msg, ERROR_MSG_SIZE, "Claimed: %.4f GiB\n", + size / 1024. / 1024. / 1024.); LOG_DEBUG(-1, "%s", error_msg); - snprintf(error_msg, ERROR_MSG_SIZE, "LS->space left: %.4f GiB\n", LS->mspace_left / 1024. / 1024 / 1024.); + snprintf(error_msg, ERROR_MSG_SIZE, "LS->space left: %.4f GiB\n", + LS->mspace_left / 1024. / 1024 / 1024.); LOG_DEBUG(-1, "%s", error_msg); #endif return SUCCEED; @@ -433,7 +435,8 @@ herr_t H5LSclaim_space(cache_storage_t *LS, hsize_t size, cache_claim_t type, stay = tmp; if (mspace < size) { #ifndef NDEBUG - snprintf(error_msg, ERROR_MSG_SIZE, "mspace (bytes): %f - %lu\n", mspace, size); + snprintf(error_msg, ERROR_MSG_SIZE, "mspace (bytes): %f - %lu\n", + mspace, size); LOG_DEBUG(-1, "%s", error_msg); #endif return FAIL; diff --git a/src/H5VLcache_ext.c b/src/H5VLcache_ext.c index 553278e..f66d5f3 100644 --- a/src/H5VLcache_ext.c +++ b/src/H5VLcache_ext.c @@ -2151,7 +2151,7 @@ static herr_t H5VL_cache_ext_dataset_prefetch_async(void *obj, hid_t fspace, H5Sget_simple_extent_dims(fs_cpy, ldims, NULL); ldims[0] = dset->H5DRMM->dset.ns_loc % nsample_per_block; hid_t mspace = H5Screate_simple(ndims, ldims, NULL); - //hsize_t offset = round_page(dset->H5DRMM->dset.sample.size * nblock * + // hsize_t offset = round_page(dset->H5DRMM->dset.sample.size * nblock * // nsample_per_block); // We only assume prefetching on dataset, not multiple. 
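The H5LSclaim_space messages reformatted above follow the interim pattern of this series: snprintf into a fixed buffer at the call site, then LOG_DEBUG("%s", buffer). A later patch in the series moves the snprintf inside the LOG_* macros in utils/debug.h (replacing sprintf with a bounded write into msg_debug[283]), after which the call sites are reverted to plain printf-style arguments. A sketch of that macro-side design, with fprintf(stderr, ...) standing in for the connector's log_debug sink and the rank argument of the real macros omitted:

#include <stdio.h>

/* Sketch of the bounded logging macro the series converges on: format
 * exactly once, inside the macro, into a fixed stack buffer. */
#define LOG_DEBUG_SKETCH(...)                                          \
  do {                                                                 \
    char msg_debug[283];                                               \
    snprintf(msg_debug, sizeof(msg_debug), __VA_ARGS__);               \
    fprintf(stderr, "[debug] %s\n", msg_debug);                        \
  } while (0)

Call sites then read as they did originally, for example:

LOG_DEBUG_SKETCH("Claimed: %.4f GiB", size / 1024. / 1024. / 1024.);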
// void *ptr = &p[offset]; From 23511a649979b2ecaa6f43a627e3463fc97f3e9a Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Fri, 13 Dec 2024 13:34:03 -0600 Subject: [PATCH 30/47] critical fix --- src/H5LS.c | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/H5LS.c b/src/H5LS.c index 8ac46c0..b44fdc7 100644 --- a/src/H5LS.c +++ b/src/H5LS.c @@ -216,7 +216,11 @@ herr_t readLSConf(char *fname, cache_storage_t *LS) { (stat(LS->path, &sb) == 0 && S_ISDIR(sb.st_mode))) { return 0; } else { - LOG_ERROR(-1, "H5LSset: path %s does not exist\n", LS->path); + int ret = snprintf(error_msg, ERROR_MSG_SIZE, "H5LSset: path %s does not exist\n", LS->path); + if (ret < 0 || ret >= ERROR_MSG_SIZE) { + LOG_WARN(-1, "path error message truncated"); + } + LOG_ERROR(-1, "%s", error_msg); MPI_Abort(MPI_COMM_WORLD, 112); } } From 42f5fadcd91aa6a985426608f127c054630790d6 Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 13 Dec 2024 19:34:30 +0000 Subject: [PATCH 31/47] Committing clang-format changes --- src/H5LS.c | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/H5LS.c b/src/H5LS.c index b44fdc7..53cc9da 100644 --- a/src/H5LS.c +++ b/src/H5LS.c @@ -216,7 +216,8 @@ herr_t readLSConf(char *fname, cache_storage_t *LS) { (stat(LS->path, &sb) == 0 && S_ISDIR(sb.st_mode))) { return 0; } else { - int ret = snprintf(error_msg, ERROR_MSG_SIZE, "H5LSset: path %s does not exist\n", LS->path); + int ret = snprintf(error_msg, ERROR_MSG_SIZE, + "H5LSset: path %s does not exist\n", LS->path); if (ret < 0 || ret >= ERROR_MSG_SIZE) { LOG_WARN(-1, "path error message truncated"); } From f0d95f8328191755d6d80cafe779c82c12c15e09 Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Fri, 13 Dec 2024 16:59:45 -0600 Subject: [PATCH 32/47] codeql fix --- src/H5LS.c | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/src/H5LS.c b/src/H5LS.c index 53cc9da..d706f8b 100644 --- a/src/H5LS.c +++ b/src/H5LS.c @@ -59,8 +59,10 @@ extern int RANK; extern int NPROC; +#define MAX_TRUNC_MSG_LEN 128 #define ERROR_MSG_SIZE 256 char error_msg[ERROR_MSG_SIZE]; +char truncated_msg[MAX_TRUNC_MSG_LEN]; /* Get the corresponding mmap function struct based on the type of node local @@ -78,13 +80,18 @@ const H5LS_mmap_class_t *get_H5LS_mmap_class_t(char *type) { p = &H5LS_GPU_mmap_ext_g; #endif } else { - char truncated_type[128]; - strncpy(truncated_type, type, sizeof(truncated_type) - 1); - truncated_type[sizeof(truncated_type) - 1] = '\0'; - snprintf(error_msg, ERROR_MSG_SIZE, - "I don't know the type of storage: %s\n" - "Supported options: SSD|BURST_BUFFER|MEMORY|GPU\n", - truncated_type); + + size_t copy_len = strlcpy(truncated_msg, type, sizeof(truncated_msg)); + if (copy_len >= MAX_TRUNC_MSG_LEN) { + LOG_WARN(-1,"Storage type string truncated"); + } + int ret = snprintf(error_msg, ERROR_MSG_SIZE, + "I don't know the type of storage: %s\n" + "Supported options: SSD|BURST_BUFFER|MEMORY|GPU\n", + truncated_msg); + if (ret < 0 || ret >= ERROR_MSG_SIZE) { + LOG_WARN(-1, "Storage type string truncated"); + } LOG_ERROR(-1, "%s", error_msg); MPI_Abort(MPI_COMM_WORLD, 111); } From 74f10400271ca266f2ba07c02bfea024bea75df9 Mon Sep 17 00:00:00 2001 From: github-actions Date: Fri, 13 Dec 2024 23:00:10 +0000 Subject: [PATCH 33/47] Committing clang-format changes --- src/H5LS.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/H5LS.c b/src/H5LS.c index d706f8b..a5919c7 100644 --- a/src/H5LS.c +++ b/src/H5LS.c @@ -83,7 +83,7 @@ const H5LS_mmap_class_t 
*get_H5LS_mmap_class_t(char *type) { size_t copy_len = strlcpy(truncated_msg, type, sizeof(truncated_msg)); if (copy_len >= MAX_TRUNC_MSG_LEN) { - LOG_WARN(-1,"Storage type string truncated"); + LOG_WARN(-1, "Storage type string truncated"); } int ret = snprintf(error_msg, ERROR_MSG_SIZE, "I don't know the type of storage: %s\n" From 48bd5bbe91c2b5b044269ed92748047a5c25c9f9 Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Fri, 13 Dec 2024 20:00:23 -0600 Subject: [PATCH 34/47] fix --- src/H5LS.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/H5LS.c b/src/H5LS.c index a5919c7..2c13202 100644 --- a/src/H5LS.c +++ b/src/H5LS.c @@ -60,7 +60,7 @@ extern int RANK; extern int NPROC; #define MAX_TRUNC_MSG_LEN 128 -#define ERROR_MSG_SIZE 256 +#define ERROR_MSG_SIZE 283 char error_msg[ERROR_MSG_SIZE]; char truncated_msg[MAX_TRUNC_MSG_LEN]; From 5b47b7d944eb0a1ea9e680309f7497b299ab2e0d Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Fri, 7 Feb 2025 16:51:04 -0600 Subject: [PATCH 35/47] codeql fixes --- .github/workflows/codeql.yml | 4 ++-- .github/workflows/hdf5-latest.yml | 2 +- src/H5LS.c | 8 ++++++-- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 583771f..4e5d4f1 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -16,8 +16,8 @@ on: branches: [ "develop" ] pull_request: branches: [ "develop" ] - schedule: - - cron: '43 6 * * 3' +# schedule: +# - cron: '43 6 * * 3' jobs: analyze: diff --git a/.github/workflows/hdf5-latest.yml b/.github/workflows/hdf5-latest.yml index 0e66196..28dbee4 100644 --- a/.github/workflows/hdf5-latest.yml +++ b/.github/workflows/hdf5-latest.yml @@ -84,7 +84,7 @@ jobs: ctest --output-on-failure - name: Upload - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: git.txt path: ${{ runner.workspace }}/vol-cache/hdf5/git.txt diff --git a/src/H5LS.c b/src/H5LS.c index 2c13202..9ed4e08 100644 --- a/src/H5LS.c +++ b/src/H5LS.c @@ -182,7 +182,8 @@ herr_t readLSConf(char *fname, cache_storage_t *LS) { if (strcmp(mac, "NULL") == 0) LS->path = NULL; else { - snprintf(LS->path, 256, "%s", mac); + strncpy(LS->path, mac, 255); + LS->path[255] = '\0'; } else if (!strcmp(ip, "HDF5_CACHE_FUSION_THRESHOLD")) { @@ -204,8 +205,11 @@ herr_t readLSConf(char *fname, cache_storage_t *LS) { if (get_replacement_policy_from_str(mac) > 0) LS->replacement_policy = get_replacement_policy_from_str(mac); } else { + char temp_ip[256]; + strncpy(temp_ip, ip, sizeof(temp_ip) - 1); + temp_ip[sizeof(temp_ip) - 1] = '\0'; snprintf(error_msg, ERROR_MSG_SIZE, "Unknown configuration setup: %s", - ip); + temp_ip); LOG_WARN(-1, "%s", error_msg); } } From 4e164fa14a1674ea1fc8a6ca4ab789d0a5e7bb19 Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Mon, 10 Feb 2025 15:36:58 -0600 Subject: [PATCH 36/47] undo cron --- .github/workflows/codeql.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 4e5d4f1..583771f 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -16,8 +16,8 @@ on: branches: [ "develop" ] pull_request: branches: [ "develop" ] -# schedule: -# - cron: '43 6 * * 3' + schedule: + - cron: '43 6 * * 3' jobs: analyze: From 6b2b826d69f39a71afbc6418a9e1dcee2e2c9756 Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Mon, 10 Feb 2025 15:51:42 -0600 Subject: [PATCH 37/47] changed from strlcpy to snprintf --- src/H5LS.c | 3 +-- 1 
file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/H5LS.c b/src/H5LS.c index 9ed4e08..1fefa37 100644 --- a/src/H5LS.c +++ b/src/H5LS.c @@ -80,8 +80,7 @@ const H5LS_mmap_class_t *get_H5LS_mmap_class_t(char *type) { p = &H5LS_GPU_mmap_ext_g; #endif } else { - - size_t copy_len = strlcpy(truncated_msg, type, sizeof(truncated_msg)); + size_t copy_len = snprintf(truncated_msg, sizeof(truncated_msg), "%s", type); if (copy_len >= MAX_TRUNC_MSG_LEN) { LOG_WARN(-1, "Storage type string truncated"); } From a0f688474663be6dc727d2d2be8ccd10a5ee5bbd Mon Sep 17 00:00:00 2001 From: github-actions Date: Mon, 10 Feb 2025 21:52:04 +0000 Subject: [PATCH 38/47] Committing clang-format changes --- src/H5LS.c | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/H5LS.c b/src/H5LS.c index 1fefa37..5228a63 100644 --- a/src/H5LS.c +++ b/src/H5LS.c @@ -80,7 +80,8 @@ const H5LS_mmap_class_t *get_H5LS_mmap_class_t(char *type) { p = &H5LS_GPU_mmap_ext_g; #endif } else { - size_t copy_len = snprintf(truncated_msg, sizeof(truncated_msg), "%s", type); + size_t copy_len = + snprintf(truncated_msg, sizeof(truncated_msg), "%s", type); if (copy_len >= MAX_TRUNC_MSG_LEN) { LOG_WARN(-1, "Storage type string truncated"); } From 58583cd55cd8472ff5253d3acb955210cbc6b421 Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Mon, 10 Feb 2025 16:45:34 -0600 Subject: [PATCH 39/47] switched log macros to use snprintf --- utils/debug.h | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/utils/debug.h b/utils/debug.h index d8ea24a..b4d8aa0 100644 --- a/utils/debug.h +++ b/utils/debug.h @@ -42,31 +42,31 @@ void log_init(int rank); #define LOG_DEBUG(X, ...) \ { \ char msg_debug[283]; \ - sprintf(msg_debug, __VA_ARGS__); \ + snprintf(msg_debug, sizeof(msg_debug), __VA_ARGS__); \ log_debug(__FILE__, __func__, __LINE__, X, msg_debug); \ } #define LOG_WARN(X, ...) \ { \ char msg_debug[283]; \ - sprintf(msg_debug, __VA_ARGS__); \ + snprintf(msg_debug, sizeof(msg_debug), __VA_ARGS__); \ log_warn(__FILE__, __func__, __LINE__, X, msg_debug); \ } #define LOG_INFO(X, ...) \ { \ char msg_debug[283]; \ - sprintf(msg_debug, __VA_ARGS__); \ + snprintf(msg_debug, sizeof(msg_debug), __VA_ARGS__); \ log_info(__FILE__, __func__, __LINE__, X, msg_debug); \ } #define LOG_ERROR(X, ...) \ { \ char msg_debug[283]; \ - sprintf(msg_debug, __VA_ARGS__); \ + snprintf(msg_debug, sizeof(msg_debug), __VA_ARGS__); \ log_error(__FILE__, __func__, __LINE__, X, msg_debug); \ } #define LOG_TRACE(X, ...) \ { \ char msg_debug[283]; \ - sprintf(msg_debug, __VA_ARGS__); \ + snprintf(msg_debug, sizeof(msg_debug), __VA_ARGS__); \ log_trace(__FILE__, __func__, __LINE__, X, msg_debug); \ } #define malloc(...) 
my_malloc(__FILE__, __LINE__, __FUNCTION__, __VA_ARGS__) From 422fe3b6ce53d2346efc9510f3cc1a9f7a9c2b84 Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Mon, 10 Feb 2025 17:16:13 -0600 Subject: [PATCH 40/47] fixed sscanf --- src/H5VLcache_ext.c | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/H5VLcache_ext.c b/src/H5VLcache_ext.c index f66d5f3..e220032 100644 --- a/src/H5VLcache_ext.c +++ b/src/H5VLcache_ext.c @@ -1319,7 +1319,8 @@ static herr_t native_vol_info(void **_info) { void *under_vol_info = NULL; /* Retrieve the underlying VOL connector value and info */ - if (sscanf(str, "under_vol=%u;", &under_vol_value) != 1) { + int scan_ret; + if ((scan_ret = sscanf(str, "under_vol=%u;", &under_vol_value)) != 1) { LOG_ERROR( -1, "Failed to parse under_vol value; make sure you have" @@ -1402,7 +1403,8 @@ static herr_t H5VL_cache_ext_str_to_info(const char *str, void **_info) { "setup"); MPI_Abort(MPI_COMM_WORLD, 1); } - if (sscanf(lasts, "under_vol=%u;", &under_vol_value) != 1) { + int scan_ret; + if ((scan_ret = sscanf(lasts, "under_vol=%u;", &under_vol_value)) != 1) { LOG_ERROR( -1, "Failed to parse under_vol value; make sure you have" From 2cd7dcae0837759308b05306fb864667662f0bd7 Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Mon, 10 Feb 2025 17:44:06 -0600 Subject: [PATCH 41/47] removed snprintf --- src/H5VLcache_ext.c | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/H5VLcache_ext.c b/src/H5VLcache_ext.c index e220032..3bbe1e4 100644 --- a/src/H5VLcache_ext.c +++ b/src/H5VLcache_ext.c @@ -785,8 +785,7 @@ static herr_t async_close_task_wait(object_close_task_t *task) { LOG_WARN(-1, "Close request is NULL."); } #ifndef NDEBUG - snprintf(log_buffer, LOG_BUFFER_SIZE, "async task finished %d", task->type); - LOG_DEBUG(-1, "%s", log_buffer); + LOG_DEBUG(-1, "async task finished %d", task->type); double t1 = MPI_Wtime(); snprintf(log_buffer, LOG_BUFFER_SIZE, "Delay closed object: %d time: %10.6f", task->type, t1 - t0); From b83a87374df9ea2b5687c8b123b73c8b5d2c2018 Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Mon, 10 Feb 2025 18:13:32 -0600 Subject: [PATCH 42/47] restore original --- src/H5VLcache_ext.c | 58 ++++++++++++++++++--------------------------- 1 file changed, 23 insertions(+), 35 deletions(-) diff --git a/src/H5VLcache_ext.c b/src/H5VLcache_ext.c index 3bbe1e4..d2470c5 100644 --- a/src/H5VLcache_ext.c +++ b/src/H5VLcache_ext.c @@ -98,7 +98,6 @@ int RANK = 0; int NPROC = 1; hbool_t HDF5_CACHE_CLOSE_ASYNC = 0; -char log_buffer[LOG_BUFFER_SIZE]; // Functions from async VOL int H5VL_async_set_delay_time(uint64_t time_us); herr_t H5VL_async_set_request_dep(void *request, void *parent_request); @@ -109,9 +108,7 @@ herr_t H5VL_async_start(); #define H5Pcopy(X) \ H5Pcopy(X); \ - snprintf(log_buffer, LOG_BUFFER_SIZE, "H5Pcopy called: %s:%d %s\n", \ - __FILE__, __LINE__, __FUNCTION__); \ - LOG_DEBUG(-1, "%s", log_buffer); + LOG_DEBUG(-1, "H5Pcopy called: %s:%d %s\n", __FILE__, __LINE__, __FUNCTION__); #define H5Scopy(X) \ H5Scopy(X); \ @@ -787,17 +784,17 @@ static herr_t async_close_task_wait(object_close_task_t *task) { #ifndef NDEBUG LOG_DEBUG(-1, "async task finished %d", task->type); double t1 = MPI_Wtime(); - snprintf(log_buffer, LOG_BUFFER_SIZE, "Delay closed object: %d time: %10.6f", - task->type, t1 - t0); - LOG_DEBUG(-1, "%s", log_buffer); + LOG_DEBUG(-1, + "Delay closed object: %d time: " + "%10.6f", + task->type, t1 - t0); #endif if (o->read_cache || o->write_cache) o->H5LS->cache_io_cls->remove_cache(task->obj, 
NULL); H5VL_cache_ext_free_obj(o); #ifndef NDEBUG double t2 = MPI_Wtime(); - snprintf(log_buffer, LOG_BUFFER_SIZE, "Remove cache time: %10.6f", t2 - t1); - LOG_DEBUG(-1, "%s", log_buffer); + LOG_DEBUG(-1, "Remove cache time: %10.6f", t2 - t1); #endif free(task->req); return 0; @@ -2576,12 +2573,11 @@ static herr_t free_cache_space_from_dataset(void *dset, hsize_t size) { } H5VL_request_status_t status; #ifndef NDEBUG - snprintf(log_buffer, LOG_BUFFER_SIZE, - "request wait(jobid: %d), current available space: " - "%.5f GiB ", - o->H5DWMM->io->current_request->id, - o->H5DWMM->cache->mspace_per_rank_left / 1024. / 1024. / 1024); - LOG_DEBUG(-1, "%s", log_buffer); + LOG_DEBUG(-1, + "request wait(jobid: %d), current available space: " + "%.5f GiB ", + o->H5DWMM->io->current_request->id, + o->H5DWMM->cache->mspace_per_rank_left / 1024. / 1024. / 1024); #endif while ((o->H5DWMM->io->current_request != NULL && o->H5DWMM->io->current_request->req != NULL)) { @@ -2694,8 +2690,7 @@ static herr_t merge_tasks_in_queue(task_data_t **task_list, int ntasks) { free(t_com); double t1 = MPI_Wtime(); #ifndef NDEBUG - snprintf(log_buffer, LOG_BUFFER_SIZE, "Merging time: %6.5f", t1 - t0); - LOG_DEBUG(-1, "%s", log_buffer); + LOG_DEBUG(-1, "Merging time: %6.5f", t1 - t0); #endif return SUCCEED; } @@ -3102,10 +3097,8 @@ static herr_t H5VL_cache_ext_dataset_wait(void *dset) { } double t1 = MPI_Wtime(); #ifndef NDEBUG - snprintf(log_buffer, LOG_BUFFER_SIZE, - "H5VLreqeust_wait time (jobid: %d): %g", - o->H5DWMM->io->current_request->id, t1 - t0); - LOG_DEBUG(-1, "%s", log_buffer); + LOG_DEBUG(-1, "H5VLrequest_wait time (jobid: %d): %f", + o->H5DWMM->io->current_request->id, t1 - t0); LOG_DEBUG(-1, "Tasks %d(%ld merged) finished", o->H5DWMM->io->current_request->id, @@ -3129,8 +3122,7 @@ static herr_t H5VL_cache_ext_dataset_wait(void *dset) { H5ESclose(o->es_id); double t1 = MPI_Wtime(); #ifndef NDEBUG - snprintf(log_buffer, LOG_BUFFER_SIZE, "ESwait time: %.5f seconds", t1 - t0); - LOG_DEBUG(-1, "%s", log_buffer); + LOG_DEBUG(-1, "ESwait time: %.5f seconds", t1 - t0); #endif } return 0; @@ -3254,11 +3246,11 @@ static herr_t H5VL_cache_ext_dataset_close(void *dset, hid_t dxpl_id, p->async_close_task_list->obj = NULL; double t1 = MPI_Wtime(); #ifndef NDEBUG - snprintf(log_buffer, LOG_BUFFER_SIZE, - "dataset close time: " - "%.6f seconds", - t1 - t0); - LOG_DEBUG(-1, "%s", log_buffer); + + LOG_DEBUG(-1, + "dataset close time: " + "%.6f seconds", + t1 - t0); #endif return ret_value; @@ -3284,8 +3276,7 @@ static herr_t H5VL_cache_ext_dataset_close(void *dset, hid_t dxpl_id, double t1 = MPI_Wtime(); #ifndef NDEBUG - snprintf(log_buffer, LOG_BUFFER_SIZE, "H5VLdataset_close time: %f", t1 - t0); - LOG_DEBUG(-1, "%s", log_buffer); + LOG_DEBUG(-1, "H5VLdataset_close time: %f", t1 - t0); #endif /* Check for async request */ @@ -3297,9 +3288,7 @@ static herr_t H5VL_cache_ext_dataset_close(void *dset, hid_t dxpl_id, H5VL_cache_ext_free_obj(o); double tt1 = MPI_Wtime(); #ifndef NDEBUG - snprintf(log_buffer, LOG_BUFFER_SIZE, - "H5VL_cache_ext_dataset_close time: %.6f seconds", tt1 - tt0); - LOG_DEBUG(-1, "%s", log_buffer); + LOG_DEBUG(-1, "H5VL_cache_ext_dataset_close time: %.6f seconds", tt1 - tt0); #endif return ret_value; @@ -5846,8 +5835,7 @@ static herr_t remove_dataset_cache_on_local_storage(void *dset, void **req) { H5VL_cache_ext_dataset_wait(dset); double t1 = MPI_Wtime(); #ifndef NDEBUG - snprintf(log_buffer, LOG_BUFFER_SIZE, "dataset_wait time: %f", t1 - t0); - LOG_DEBUG(-1, "%s", log_buffer); + LOG_DEBUG(-1, 
"dataset_wait time: %f", t1 - t0); #endif o->H5DWMM = NULL; } From c15ce54c6609fcd51cade9c62ef54fc69e6684ca Mon Sep 17 00:00:00 2001 From: github-actions Date: Tue, 11 Feb 2025 00:13:55 +0000 Subject: [PATCH 43/47] Committing clang-format changes --- src/H5VLcache_ext.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/H5VLcache_ext.c b/src/H5VLcache_ext.c index d2470c5..448626f 100644 --- a/src/H5VLcache_ext.c +++ b/src/H5VLcache_ext.c @@ -3247,7 +3247,7 @@ static herr_t H5VL_cache_ext_dataset_close(void *dset, hid_t dxpl_id, double t1 = MPI_Wtime(); #ifndef NDEBUG - LOG_DEBUG(-1, + LOG_DEBUG(-1, "dataset close time: " "%.6f seconds", t1 - t0); From 842f4ffae46e93e7da3856c43e6635987d1c9fa4 Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Mon, 10 Feb 2025 18:25:56 -0600 Subject: [PATCH 44/47] typo --- src/H5VLcache_ext.c | 1 + 1 file changed, 1 insertion(+) diff --git a/src/H5VLcache_ext.c b/src/H5VLcache_ext.c index 448626f..5af26a1 100644 --- a/src/H5VLcache_ext.c +++ b/src/H5VLcache_ext.c @@ -98,6 +98,7 @@ int RANK = 0; int NPROC = 1; hbool_t HDF5_CACHE_CLOSE_ASYNC = 0; +char log_buffer[LOG_BUFFER_SIZE]; // Functions from async VOL int H5VL_async_set_delay_time(uint64_t time_us); herr_t H5VL_async_set_request_dep(void *request, void *parent_request); From afbb66438c052a32fb3f85890840ef83f4114760 Mon Sep 17 00:00:00 2001 From: Scot Breitenfeld Date: Tue, 11 Feb 2025 09:43:23 -0600 Subject: [PATCH 45/47] updated LOG prints --- src/H5LS.c | 48 ++++++++++----------------------------------- src/H5VLcache_ext.c | 31 ++++++++--------------------- 2 files changed, 18 insertions(+), 61 deletions(-) diff --git a/src/H5LS.c b/src/H5LS.c index 5228a63..2e7f43d 100644 --- a/src/H5LS.c +++ b/src/H5LS.c @@ -59,10 +59,8 @@ extern int RANK; extern int NPROC; -#define MAX_TRUNC_MSG_LEN 128 #define ERROR_MSG_SIZE 283 char error_msg[ERROR_MSG_SIZE]; -char truncated_msg[MAX_TRUNC_MSG_LEN]; /* Get the corresponding mmap function struct based on the type of node local @@ -80,19 +78,10 @@ const H5LS_mmap_class_t *get_H5LS_mmap_class_t(char *type) { p = &H5LS_GPU_mmap_ext_g; #endif } else { - size_t copy_len = - snprintf(truncated_msg, sizeof(truncated_msg), "%s", type); - if (copy_len >= MAX_TRUNC_MSG_LEN) { - LOG_WARN(-1, "Storage type string truncated"); - } - int ret = snprintf(error_msg, ERROR_MSG_SIZE, - "I don't know the type of storage: %s\n" - "Supported options: SSD|BURST_BUFFER|MEMORY|GPU\n", - truncated_msg); - if (ret < 0 || ret >= ERROR_MSG_SIZE) { - LOG_WARN(-1, "Storage type string truncated"); - } - LOG_ERROR(-1, "%s", error_msg); + LOG_ERROR(-1, + "I don't know the type of storage: %s\n" + "Supported options: SSD|BURST_BUFFER|MEMORY|GPU\n", + type); MPI_Abort(MPI_COMM_WORLD, 111); } return p; @@ -111,14 +100,7 @@ cache_replacement_policy_t get_replacement_policy_from_str(char *str) { else if (!strcmp(str, "LIFO")) return LIFO; else { - if (strlen(str) < 200) { - snprintf(error_msg, ERROR_MSG_SIZE, - "unknown cache replacement type: %s\n", str); - } else { - snprintf(error_msg, ERROR_MSG_SIZE, - "unknown cache replacement type: string too long to display\n"); - } - LOG_ERROR(-1, "%s", error_msg); + LOG_ERROR(-1, "unknown cache replacement type: %s\n", str); return FAIL; } } @@ -205,12 +187,7 @@ herr_t readLSConf(char *fname, cache_storage_t *LS) { if (get_replacement_policy_from_str(mac) > 0) LS->replacement_policy = get_replacement_policy_from_str(mac); } else { - char temp_ip[256]; - strncpy(temp_ip, ip, sizeof(temp_ip) - 1); - temp_ip[sizeof(temp_ip) - 1] = '\0'; - 
-                 temp_ip);
-        LOG_WARN(-1, "%s", error_msg);
+        LOG_WARN(-1, "Unknown configuration setup: %s", ip);
       }
     }
     if (LS->mspace_total < LS->write_buffer_size) {
@@ -425,12 +402,9 @@ herr_t H5LSclaim_space(cache_storage_t *LS, hsize_t size, cache_claim_t type,
     if (LS->mspace_left > size) {
       LS->mspace_left = LS->mspace_left - size;
 #ifndef NDEBUG
-      snprintf(error_msg, ERROR_MSG_SIZE, "Claimed: %.4f GiB\n",
-               size / 1024. / 1024. / 1024.);
-      LOG_DEBUG(-1, "%s", error_msg);
-      snprintf(error_msg, ERROR_MSG_SIZE, "LS->space left: %.4f GiB\n",
-               LS->mspace_left / 1024. / 1024 / 1024.);
-      LOG_DEBUG(-1, "%s", error_msg);
+      LOG_DEBUG(-1, "Claimed: %.4f GiB\n", size / 1024. / 1024. / 1024.);
+      LOG_DEBUG(-1, "LS->space left: %.4f GiB\n",
+                LS->mspace_left / 1024. / 1024 / 1024.);
 #endif
       return SUCCEED;
     } else {
@@ -451,9 +425,7 @@ herr_t H5LSclaim_space(cache_storage_t *LS, hsize_t size, cache_claim_t type,
         stay = tmp;
       if (mspace < size) {
 #ifndef NDEBUG
-        snprintf(error_msg, ERROR_MSG_SIZE, "mspace (bytes): %f - %lu\n",
-                 mspace, size);
-        LOG_DEBUG(-1, "%s", error_msg);
+        LOG_DEBUG(-1, "mspace (bytes): %f - %lu\n", mspace, size);
 #endif
         return FAIL;
       } else {
diff --git a/src/H5VLcache_ext.c b/src/H5VLcache_ext.c
index 5af26a1..18695d4 100644
--- a/src/H5VLcache_ext.c
+++ b/src/H5VLcache_ext.c
@@ -44,8 +44,6 @@
 #include 
 #include 
 #include 
-// debug
-#define LOG_BUFFER_SIZE 1024
 // VOL related header
 #include "H5LS.h"
 #include "H5VLcache_ext_private.h"
@@ -98,7 +96,6 @@
 int RANK = 0;
 int NPROC = 1;
 hbool_t HDF5_CACHE_CLOSE_ASYNC = 0;
-char log_buffer[LOG_BUFFER_SIZE];
 // Functions from async VOL
 int H5VL_async_set_delay_time(uint64_t time_us);
 herr_t H5VL_async_set_request_dep(void *request, void *parent_request);
@@ -1450,22 +1447,11 @@ static herr_t H5VL_cache_ext_str_to_info(const char *str, void **_info) {
 
   LOG_INFO(-1, " storage path: %s", p->H5LS->path);
 
-  int ret =
-      snprintf(log_buffer, LOG_BUFFER_SIZE, " storage size: %.4f GiB",
-               p->H5LS->mspace_total / 1024. / 1024. / 1024.);
-  if (ret < 0 || ret >= LOG_BUFFER_SIZE) {
-    LOG_WARN(-1, "Log Error when formatting storage size message");
-  } else {
-    LOG_INFO(-1, "%s", log_buffer);
-  }
+  LOG_INFO(-1, " storage size: %.4f GiB",
+           p->H5LS->mspace_total / 1024. / 1024. / 1024.);
 
-  ret = snprintf(log_buffer, LOG_BUFFER_SIZE, " write buffer size: %.4f GiB",
-                 p->H5LS->write_buffer_size / 1024. / 1024. / 1024.);
-  if (ret < 0 || ret >= LOG_BUFFER_SIZE) {
-    LOG_WARN(-1, "Log Error when formatting write buffer size message");
-  } else {
-    LOG_INFO(-1, "%s", log_buffer);
-  }
+  LOG_INFO(-1, " write buffer size: %.4f GiB",
+           p->H5LS->write_buffer_size / 1024. / 1024. / 1024.);
 
   LOG_INFO(-1, " storage type: %s", p->H5LS->type);
 
@@ -3263,11 +3249,10 @@ static herr_t H5VL_cache_ext_dataset_close(void *dset, hid_t dxpl_id,
 
     double t1 = MPI_Wtime();
 #ifndef NDEBUG
-    snprintf(log_buffer, LOG_BUFFER_SIZE,
-             "dataset remove cache time (including wait time): "
-             "%.6f seconds",
-             t1 - t0);
-    LOG_DEBUG(-1, "%s", log_buffer);
+    LOG_DEBUG(-1,
+              "dataset remove cache time (including wait time): "
+              "%.6f seconds",
+              t1 - t0);
 #endif
   }

From f42d03191ba7409f3f48a32c6e44c743e1e3dbfd Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Tue, 11 Feb 2025 09:55:08 -0600
Subject: [PATCH 46/47] cleanup

---
 .github/{ => workflows}/codeql-config.yml | 0
 .github/workflows/codeql.yml              | 2 +-
 2 files changed, 1 insertion(+), 1 deletion(-)
 rename .github/{ => workflows}/codeql-config.yml (100%)

diff --git a/.github/codeql-config.yml b/.github/workflows/codeql-config.yml
similarity index 100%
rename from .github/codeql-config.yml
rename to .github/workflows/codeql-config.yml
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 583771f..000b066 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -119,7 +119,7 @@ jobs:
       with:
         languages: c-cpp
         build-mode: manual
-        config-file: ./.github/codeql-config.yml
+        config-file: ./.github/workflows/codeql-config.yml
       # If you wish to specify custom queries, you can do so here or in a config file.
       # By default, queries listed here will override any specified in a config file.
       # Prefix the list here with "+" to use these queries and those in the config file.

From e18aa7ac2e0c44dda91f306ab1b4fec342d10d53 Mon Sep 17 00:00:00 2001
From: Scot Breitenfeld
Date: Tue, 11 Feb 2025 10:01:52 -0600
Subject: [PATCH 47/47] cleanup

---
 .github/{workflows => }/codeql-config.yml | 0
 .github/workflows/codeql.yml              | 2 +-
 2 files changed, 1 insertion(+), 1 deletion(-)
 rename .github/{workflows => }/codeql-config.yml (100%)

diff --git a/.github/workflows/codeql-config.yml b/.github/codeql-config.yml
similarity index 100%
rename from .github/workflows/codeql-config.yml
rename to .github/codeql-config.yml
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 000b066..583771f 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -119,7 +119,7 @@ jobs:
       with:
         languages: c-cpp
         build-mode: manual
-        config-file: ./.github/workflows/codeql-config.yml
+        config-file: ./.github/codeql-config.yml
       # If you wish to specify custom queries, you can do so here or in a config file.
      # By default, queries listed here will override any specified in a config file.
      # Prefix the list here with "+" to use these queries and those in the config file.
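
Taken together, patches 40 through 45 settle on two idioms: check the conversion count returned by sscanf instead of discarding it, and pass format arguments straight to the variadic LOG_* macros rather than staging messages in the shared log_buffer with snprintf (which needed truncation checks and shares one buffer across callers). Below is a minimal standalone sketch of both idioms. The LOG_DEBUG definition here is illustrative only; the connector's real logging macros are declared in its own logging header and behave differently, and ##__VA_ARGS__ is a GNU extension accepted by gcc and clang.

#include <stdio.h>

/* Illustrative stand-in for a variadic LOG_DEBUG macro: arguments go
 * straight into fprintf, so no shared staging buffer, no snprintf, and
 * no truncation check are needed.
 * (##__VA_ARGS__ is a GNU extension supported by gcc and clang.) */
#define LOG_DEBUG(rank, fmt, ...)                                             \
  fprintf(stderr, "[DEBUG rank=%d] " fmt "\n", rank, ##__VA_ARGS__)

int main(void) {
  const char *str = "under_vol=512;";
  unsigned under_vol_value = 0;

  /* Check the conversion count that sscanf returns, as patch 40 does;
   * a silently failed parse would leave under_vol_value unset. */
  int scan_ret = sscanf(str, "under_vol=%u;", &under_vol_value);
  if (scan_ret != 1) {
    LOG_DEBUG(-1, "Failed to parse under_vol value from '%s'", str);
    return 1;
  }
  LOG_DEBUG(-1, "parsed under_vol value: %u", under_vol_value);
  return 0;
}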