diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 6a1cfbde..6df42de3 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -10,52 +10,82 @@ on:
       - 'docs/**'
       - 'COPYING'
       - '**.md'
+env:
+  ADMIN_PASSWORD: admin
+  ADMIN_USERNAME: admin
+  USER_NAME: test_user1
+  USER_PASSWORD: test
+  USER2_NAME: test_user2
+  USER2_PASSWORD: test
+  HSDS_USERNAME: test_user1
+  HSDS_PASSWORD: test
+  HSDS_PATH: /home/test_user1/
+  HDF5_API_TEST_PATH_PREFIX: /home/test_user1/
+  HSDS_ENDPOINT: http+unix://%2Ftmp%2Fhs%2Fsn_1.sock
+  HDF5_VOL_CONNECTOR: REST
+  ROOT_DIR: ${{github.workspace}}/hsdsdata
+  BUCKET_NAME: hsdstest
 jobs:
-  build_and_test_rest_vol:
+  build_and_test_with_autotools:
     strategy:
       fail-fast: false
       matrix:
         os: [ubuntu-latest]
-        python-version: ["3.8", "3.9", "3.10"]
-        endpoint: ["http+unix://%2Ftmp%2Fhs%2Fsn_1.sock"]
-
+        python-version: ["3.10"]
+        hdf5-branch: ["hdf5_1_14", "develop"]
+
     runs-on: ${{matrix.os}}
     steps:
-      - name: Create environment variable setup script
-        working-directory: ${{github.workspace}}
-        run: |
-          echo "#! /bin/sh" >> setup_env_vars.sh
-          printf '%s\n' 'echo "ADMIN_PASSWORD=admin" >> $GITHUB_ENV' >> setup_env_vars.sh
-          printf '%s\n' 'export ADMIN_PASSWORD=admin' >> setup_env_vars.sh
-          printf '%s\n' 'echo "ADMIN_USERNAME=admin" >> $GITHUB_ENV' >> setup_env_vars.sh
-          printf '%s\n' 'export ADMIN_USERNAME=admin' >> setup_env_vars.sh
-          printf '%s\n' 'echo "USER_NAME=test_user1" >> $GITHUB_ENV' >> setup_env_vars.sh
-          printf '%s\n' 'export USER_NAME=test_user1' >> setup_env_vars.sh
-          printf '%s\n' 'echo "USER_PASSWORD=test" >> $GITHUB_ENV' >> setup_env_vars.sh
-          printf '%s\n' 'export USER_PASSWORD=test' >> setup_env_vars.sh
-          printf '%s\n' 'echo "USER2_NAME=test_user2" >> $GITHUB_ENV' >> setup_env_vars.sh
-          printf '%s\n' 'export USER2_NAME=test_user2' >> setup_env_vars.sh
-          printf '%s\n' 'echo "USER2_PASSWORD=test" >> $GITHUB_ENV' >> setup_env_vars.sh
-          printf '%s\n' 'export USER2_PASSWORD=test' >> setup_env_vars.sh
-          printf '%s\n' 'echo "HSDS_USERNAME=test_user1" >> $GITHUB_ENV' >> setup_env_vars.sh
-          printf '%s\n' 'export HSDS_USERNAME=test_user1' >> setup_env_vars.sh
-          printf '%s\n' 'echo "HSDS_PASSWORD=test" >> $GITHUB_ENV' >> setup_env_vars.sh
-          printf '%s\n' 'export HSDS_PASSWORD=test' >> setup_env_vars.sh
-          printf '%s\n' 'echo "HSDS_PATH=/home/test_user1/" >> $GITHUB_ENV' >> setup_env_vars.sh
-          printf '%s\n' 'export HSDS_PATH=/home/test_user1/' >> setup_env_vars.sh
-          printf '%s\n' 'echo "HSDS_ENDPOINT=${{matrix.endpoint}}" >> $GITHUB_ENV' >> setup_env_vars.sh
-          printf '%s\n' 'export HSDS_ENDPOINT=${{matrix.endpoint}}' >> setup_env_vars.sh
-          printf '%s\n' 'echo "HDF5_VOL_CONNECTOR=REST" >> $GITHUB_ENV' >> setup_env_vars.sh
-          printf '%s\n' 'export HDF5_VOL_CONNECTOR=REST ' >> setup_env_vars.sh
-          printf '%s\n' 'echo "HDF5_PLUGIN_PATH=${{github.workspace}}/hdf5/build/bin/" >> $GITHUB_ENV' >> setup_env_vars.sh
-          printf '%s\n' 'export HDF5_PLUGIN_PATH=${{github.workspace}}/hdf5/build/bin/' >> setup_env_vars.sh
-          printf '%s\n' 'echo "ROOT_DIR=${{github.workspace}}/hsdsdata" >> $GITHUB_ENV' >> setup_env_vars.sh
-          printf '%s\n' 'export ROOT_DIR=${{github.workspace}}/hsdsdata' >> setup_env_vars.sh
-          printf '%s\n' 'echo "BUCKET_NAME=hsdstest" >> $GITHUB_ENV' >> setup_env_vars.sh
-          printf '%s\n' 'export BUCKET_NAME=hsdstest' >> setup_env_vars.sh
-          chmod 777 setup_env_vars.sh
-
+      - uses: actions/checkout@v3
+        with:
+          repository: HDFGroup/hdf5
+          ref: ${{matrix.hdf5-branch}}
+          path: ${{github.workspace}}/hdf5
+
+      - name: Get REST VOL dependencies
+
run: | + sudo apt-get install libcurl4-openssl-dev + sudo apt-get install libyajl-dev + + - name: Get Autotools Dependencies + run: | + sudo apt update + sudo apt install automake autoconf libtool libtool-bin + + - name: Get REST VOL + uses: actions/checkout@v3 + with: + path: ${{github.workspace}}/vol-rest + ref: build_under_lib_workflow + repository: mattjala/vol-rest # TODO: For workflow testing + + - name: Autotools Configure + Build HDF5 + run: | + ./autogen.sh + ./configure --prefix=${{github.workspace}}/hdf5install \ + --enable-hl --disable-threadsafe \ + --enable-build-mode=production --enable-shared + make + make install + shell: bash + working-directory: ${{github.workspace}}/hdf5 + + - name: Autotools Configure REST VOL + run: | + ./autogen.sh + mkdir ${{github.workspace}}/rest_vol_install + CFLAGS="-D_POSIX_C_SOURCE=200809L" ./configure --prefix=${{github.workspace}}/rest_vol_install --with-hdf5=${{github.workspace}}/hdf5install + shell: bash + working-directory: ${{github.workspace}}/vol-rest + + - name: Build + Install REST VOL + run: | + make + make install + shell: bash + working-directory: ${{github.workspace}}/vol-rest/ + - uses: actions/cache/restore@v3 with: key: hsds @@ -97,84 +127,331 @@ jobs: run: | cd ${{github.workspace}}/hsds pytest - + + - name: Install valgrind + run: sudo apt install valgrind + working-directory: ${{ github.workspace }} + + - name: Start HSDS + if: ${{ matrix.endpoint != 'http://127.0.0.1:5101'}} + run: | + cd ${{github.workspace}}/hsds + mkdir ${{github.workspace}}/hsdsdata && + mkdir ${{github.workspace}}/hsdsdata/hsdstest && + cp admin/config/groups.default admin/config/groups.txt && + cp admin/config/passwd.default admin/config/passwd.txt && + cp admin/config/groups.default admin/config/groups.txt && + cp admin/config/passwd.default admin/config/passwd.txt + ROOT_DIR=${{github.workspace}}/hsdadata ./runall.sh --no-docker 1 & + sleep 10 + working-directory: ${{github.workspace}}/hsds + + - name: Test HSDS + if: ${{matrix.endpoint != 'http://127.0.0.1:5101'}} + run: | + python tests/integ/setup_test.py + working-directory: ${{github.workspace}}/hsds + + - name: Test REST VOL + working-directory: ${{github.workspace}}/vol-rest/ + run: | + HDF5_PLUGIN_PATH=${{github.workspace}}/rest_vol_install/lib HDF5_VOL_CONNECTOR=REST ./test/test_rest_vol + + build_and_test_with_cmake: + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ["3.10"] + hdf5-branch: ["hdf5_1_14", "develop"] + + runs-on: ${{matrix.os}} + steps: + - uses: actions/checkout@v3 + with: + repository: HDFGroup/hdf5 + ref: ${{matrix.hdf5-branch}} + path: ${{github.workspace}}/hdf5 + + - name: Get REST VOL dependencies + run: | + sudo apt-get install libcurl4-openssl-dev + sudo apt-get install libyajl-dev + - name: Get REST VOL uses: actions/checkout@v3 with: - path: ${{github.workspace}}/rest-vol + path: ${{github.workspace}}/vol-rest + ref: build_under_lib_workflow + repository: mattjala/vol-rest # TODO: For workflow testing + + - name: CMake Configure + Build HDF5 + run: | + mkdir ${{github.workspace}}/hdf5/build + cd ./build + cmake \ + -DHDF5_BUILD_HL_LIB=ON \ + -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_SZIP_SUPPORT=OFF \ + -DHDF5_TEST_API=ON \ + -DHDF5_ENABLE_Z_LIB_SUPPORT=OFF \ + -DCMAKE_BUILD_TYPE=RelWithDebInfo -DHDF5_ENABLE_THREADSAFE=OFF \ + -DCMAKE_INSTALL_PREFIX=${{github.workspace}}/hdf5install \ + .. 
+ make + make install + shell: bash + working-directory: ${{github.workspace}}/hdf5 + + - name: CMake Configure REST VOL + run: | + mkdir ./build + cd ./build + CFLAGS="-D_POSIX_C_SOURCE=200809L" cmake -G "Unix Makefiles" -DHDF5_ROOT=${{github.workspace}}/hdf5install \ + -DCMAKE_INSTALL_PREFIX=${{github.workspace}}/vol-rest/install \ + .. + shell: bash + working-directory: ${{github.workspace}}/vol-rest + + - name: Build + Install REST VOL + run: | + make + make install + shell: bash + working-directory: ${{github.workspace}}/vol-rest/build + + - uses: actions/cache/restore@v3 + with: + key: hsds + path: ${{github.workspace}}/hsds + id: hsds-restore + + - uses: actions/checkout@v3 + if: ${{!steps.hsds-restore.outputs.cache-hit}} + with: + repository: HDFGroup/hsds + path: ${{github.workspace}}/hsds + + - uses: actions/cache/save@v3 + if: ${{!steps.hsds-restore.outputs.cache-hit}} + with: + key: hsds + path: ${{github.workspace}}/hsds + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + - name: Install HSDS dependencies + shell: bash + run: | + python -m pip install --upgrade pip + python -m pip install pytest + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + + - name: Install HSDS package + shell: bash + run: | + cd ${{github.workspace}}/hsds + pip install -e . + + - name: Run HSDS unit tests + shell: bash + run: | + cd ${{github.workspace}}/hsds + pytest + + - name: Install valgrind + run: sudo apt install valgrind + working-directory: ${{ github.workspace }} + + - name: Start HSDS + if: ${{ matrix.endpoint != 'http://127.0.0.1:5101'}} + run: | + cd ${{github.workspace}}/hsds + mkdir ${{github.workspace}}/hsdsdata && + mkdir ${{github.workspace}}/hsdsdata/hsdstest && + cp admin/config/groups.default admin/config/groups.txt && + cp admin/config/passwd.default admin/config/passwd.txt && + cp admin/config/groups.default admin/config/groups.txt && + cp admin/config/passwd.default admin/config/passwd.txt + ROOT_DIR=${{github.workspace}}/hsdadata ./runall.sh --no-docker 1 & + sleep 10 + working-directory: ${{github.workspace}}/hsds + + - name: Test HSDS + if: ${{matrix.endpoint != 'http://127.0.0.1:5101'}} + run: | + python tests/integ/setup_test.py + working-directory: ${{github.workspace}}/hsds + + - name: Test REST VOL + working-directory: ${{github.workspace}}/hdf5/build/ + run: | + echo "HDF5_PLUGIN_PATH=${{github.workspace}}/hdf5/build/bin/" >> $GITHUB_ENV + valgrind --leak-check=full -s ctest -R "test_rest_vol" -VV + + build_with_fetchcontent: + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: ["3.10"] + vol-source: ['GIT', 'LOCAL_DIR'] + hdf5-branch: ["hdf5_1_14", "develop"] + + runs-on: ${{matrix.os}} + steps: - uses: actions/checkout@v3 with: - repository: mattjala/hdf5 # TODO: For workflow testing - ref: vol_cmake # TODO: For workflow testing + repository: HDFGroup/hdf5 + ref: ${{matrix.hdf5-branch}} path: ${{github.workspace}}/hdf5 - - name: Get REST-VOL dependencies + - name: Get REST VOL dependencies run: | sudo apt-get install libcurl4-openssl-dev sudo apt-get install libyajl-dev - - name: CMake Configure HDF5 + - name: Get REST VOL + if: ${{matrix.vol-source == 'LOCAL_DIR'}} + uses: actions/checkout@v3 + with: + path: ${{github.workspace}}/vol-rest + ref: build_under_lib_workflow + repository: mattjala/vol-rest # TODO: For workflow testing + + - name: CMake Configure HDF5 + REST VOL from GIT + if: ${{matrix.vol-source == 'GIT'}} run: | - 
mkdir ./build + mkdir ${{github.workspace}}/hdf5/build cd ./build - cmake -DHDF5_BUILD_HL_LIB=OFF -DHDF5_VOL_REST_ENABLE_EXAMPLES=OFF \ - -DBUILD_STATIC_LIBS=OFF \ + cmake -DHDF5_VOL_REST_ENABLE_EXAMPLES=ON \ + -DHDF5_BUILD_HL_LIB=ON \ -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_SZIP_SUPPORT=OFF \ - -DHDF5_TEST_API=ON -DHDF5_VOL_ALLOW_EXTERNAL=ON \ + -DHDF5_TEST_API=ON -DHDF5_VOL_ALLOW_EXTERNAL='GIT' \ -DHDF5_VOL_URL01=https://github.com/mattjala/vol-rest.git \ - -DHDF5_VOL_VOL-REST_BRANCH="build_under_library" \ + -DHDF5_VOL_VOL-REST_BRANCH=build_under_lib_workflow \ -DHDF5_VOL_VOL-REST_NAME="REST" -DHDF5_VOL_VOL-REST_TEST_PARALLEL=OFF \ - -DHDF5_ENABLE_Z_LIB_SUPPORT=OFF -DHDF5_ALLOW_EXTERNAL_SUPPORT="GIT" \ + -DHDF5_ENABLE_Z_LIB_SUPPORT=OFF \ -DCMAKE_BUILD_TYPE=RelWithDebInfo -DHDF5_ENABLE_THREADSAFE=OFF \ -DCMAKE_INSTALL_PREFIX=${{github.workspace}}/hdf5install \ .. shell: bash working-directory: ${{github.workspace}}/hdf5 - - - name: Build HDF5 + REST VOL - run: make - working-directory: ${{ github.workspace }}/hdf5/build - - name: Install HDF + REST VOL - run: make install - working-directory: ${{ github.workspace }}/hdf5/build + - name: CMake Configure HDF5 + REST VOL from LOCAL_DIR + if: ${{matrix.vol-source == 'LOCAL_DIR'}} + run: | + mkdir ${{github.workspace}}/hdf5/build + sudo cmake -DHDF5_BUILD_HL_LIB=ON -DHDF5_VOL_REST_ENABLE_EXAMPLES=ON \ + -DHDF5_ENABLE_SZIP_SUPPORT=OFF \ + -DBUILD_SHARED_LIBS=ON -DHDF5_ENABLE_Z_LIB_SUPPORT=OFF \ + -DHDF5_TEST_API=ON -DHDF5_VOL_ALLOW_EXTERNAL=${{matrix.vol-source}} \ + -DHDF5_VOL_PATH01=${{github.workspace}}/vol-rest \ + -DHDF5_VOL_VOL-REST_NAME="REST" -DHDF5_VOL_VOL-REST_TEST_PARALLEL=OFF \ + -DCMAKE_BUILD_TYPE=RelWithDebInfo \ + -DCMAKE_INSTALL_PREFIX=${{github.workspace}}/hdf5install \ + --log-level=verbose \ + -B ${{github.workspace}}/hdf5/build + + shell: bash + working-directory: ${{github.workspace}}/hdf5/ + + - name: CMake Build HDF5 + REST VOL + run: | + sudo make + shell: bash + working-directory: ${{github.workspace}}/hdf5/build + + - name: CMake Install HDF5 + REST VOL + run: | + sudo make install + shell: bash + working-directory: ${{github.workspace}}/hdf5/build - name: Install valgrind run: sudo apt install valgrind working-directory: ${{ github.workspace }} - + + - uses: actions/cache/restore@v3 + with: + key: hsds + path: ${{github.workspace}}/hsds + id: hsds-restore + + - uses: actions/checkout@v3 + if: ${{!steps.hsds-restore.outputs.cache-hit}} + with: + repository: HDFGroup/hsds + path: ${{github.workspace}}/hsds + + - uses: actions/cache/save@v3 + if: ${{!steps.hsds-restore.outputs.cache-hit}} + with: + key: hsds + path: ${{github.workspace}}/hsds + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v3 + with: + python-version: ${{ matrix.python-version }} + + - name: Install HSDS dependencies + shell: bash + run: | + python -m pip install --upgrade pip + python -m pip install pytest + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + + - name: Install HSDS package + shell: bash + run: | + cd ${{github.workspace}}/hsds + pip install -e . 
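#
# NOTE (editor): a minimal sketch of how these CI steps fit together when run by hand,
# assuming an HSDS checkout in ./hsds and the HDF5 + REST VOL build tree used above;
# the local paths are illustrative only and are not part of this patch.
#
#   export ADMIN_USERNAME=admin ADMIN_PASSWORD=admin
#   export USER_NAME=test_user1 USER_PASSWORD=test USER2_NAME=test_user2 USER2_PASSWORD=test
#   export HSDS_USERNAME=test_user1 HSDS_PASSWORD=test HSDS_PATH=/home/test_user1/
#   export HDF5_API_TEST_PATH_PREFIX=/home/test_user1/
#   export HSDS_ENDPOINT=http+unix://%2Ftmp%2Fhs%2Fsn_1.sock
#   export HDF5_VOL_CONNECTOR=REST HDF5_PLUGIN_PATH="$PWD/hdf5/build/bin/"
#   export ROOT_DIR="$PWD/hsdsdata" BUCKET_NAME=hsdstest
#   mkdir -p "$ROOT_DIR/$BUCKET_NAME"
#   (cd hsds &&
#     cp admin/config/groups.default admin/config/groups.txt &&
#     cp admin/config/passwd.default admin/config/passwd.txt &&
#     ./runall.sh --no-docker 1 &) && sleep 10          # start HSDS without Docker
#   (cd hsds && python tests/integ/setup_test.py)       # verify the server is reachable
#   (cd hdf5/build && ctest -R "test_rest_vol" -VV)     # run the REST VOL tests
#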
+ + - name: Run HSDS unit tests + shell: bash + run: | + cd ${{github.workspace}}/hsds + pytest + - name: Start HSDS if: ${{ matrix.endpoint != 'http://127.0.0.1:5101'}} run: | - cd ${{github.workspace}}/hsds - mkdir ${{github.workspace}}/hsdsdata && - mkdir ${{github.workspace}}/hsdsdata/hsdstest && - cp admin/config/groups.default admin/config/groups.txt && - cp admin/config/passwd.default admin/config/passwd.txt && - cp admin/config/groups.default admin/config/groups.txt && - cp admin/config/passwd.default admin/config/passwd.txt && - ${{github.workspace}}/setup_env_vars.sh - ROOT_DIR=${{github.workspace}}/hsdadata ./runall.sh --no-docker 1 & - sleep 10 + cd ${{github.workspace}}/hsds + mkdir ${{github.workspace}}/hsdsdata && + mkdir ${{github.workspace}}/hsdsdata/hsdstest && + cp admin/config/groups.default admin/config/groups.txt && + cp admin/config/passwd.default admin/config/passwd.txt && + cp admin/config/groups.default admin/config/groups.txt && + cp admin/config/passwd.default admin/config/passwd.txt + ROOT_DIR=${{github.workspace}}/hsdadata ./runall.sh --no-docker 1 & + sleep 10 working-directory: ${{github.workspace}}/hsds - name: Test HSDS if: ${{matrix.endpoint != 'http://127.0.0.1:5101'}} run: | - ${{github.workspace}}/setup_env_vars.sh - python tests/integ/setup_test.py + python tests/integ/setup_test.py working-directory: ${{github.workspace}}/hsds - name: Test REST VOL working-directory: ${{github.workspace}}/hdf5/build/ run: | - ${{github.workspace}}/setup_env_vars.sh - valgrind --leak-check=full -s ctest -R "test_rest_vol" -VV + sudo \ + HDF5_PLUGIN_PATH="${{github.workspace}}/hdf5install/lib" HDF5_VOL_CONNECTOR=REST \ + ADMIN_PASSWORD=admin ADMIN_USERNAME=admin \ + USER_NAME=test_user1 USER_PASSWORD=test \ + USER2_NAME=test_user2 USER2_PASSWORD=test \ + HSDS_USERNAME=test_user1 HSDS_PASSWORD=test \ + HSDS_PATH=/home/test_user1/ HDF5_API_TEST_PATH_PREFIX=/home/test_user1/ \ + HSDS_ENDPOINT=http+unix://%2Ftmp%2Fhs%2Fsn_1.sock HDF5_VOL_CONNECTOR=REST \ + ROOT_DIR=${{github.workspace}}/hsdsdata BUCKET_NAME=hsdstest \ + valgrind --leak-check=full -s ctest -R "test_rest_vol" -VV # TODO: Attribute, dataset, link, and testhdf5 tests currently fail # - name: Test REST VOL with API # run: | -# ${{github.workspace}}/setup_env_vars.sh # valgrind --leak-check=full -s ctest -R "vol-rest" -VV # working-directory: ${{github.workspace}}/hdf5/build/ diff --git a/.gitmodules b/.gitmodules index e69de29b..7c1f0753 100644 --- a/.gitmodules +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "test/vol-tests"] + path = test/vol-tests + url = https://github.com/HDFGroup/vol-tests diff --git a/CMakeLists.txt b/CMakeLists.txt index 90ab91b4..8c91971f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -101,8 +101,6 @@ set (HDF5_VOL_REST_TEST_SRC_DIR ${HDF5_VOL_REST_SOURCE_DIR}/test) set (HDF5_VOL_REST_EXAMPLES_DIR ${HDF5_VOL_REST_SOURCE_DIR}/examples) set (HDF5_DIR_NAME "hdf5") set (HDF5_HL_DIR_NAME "hl") -#set (HDF5_DIR ${HDF5_VOL_REST_SRC_DIR}/${HDF5_DIR_NAME}) -#set (HDF5_BINARY_DIR ${HDF5_VOL_REST_BINARY_DIR}/${HDF5_DIR_NAME}) #----------------------------------------------------------------------------- # Find HDF5, cURL and YAJL before building @@ -111,6 +109,12 @@ if (HDF5_FOUND STREQUAL "") set (HDF5_FOUND FALSE) endif() +find_package(HDF5 MODULE COMPONENTS C HL) + +if (NOT HDF5_FOUND) + message(SEND_ERROR "HDF5 not found!") +endif() + if (HDF5_ENABLE_THREADSAFE AND NOT Threads_FOUND) find_package(Threads) endif () @@ -457,19 +461,11 @@ set (HDF5_VOL_REST_INCLUDE_DIRECTORIES 
${CMAKE_RUNTIME_OUTPUT_DIRECTORY} ) -if (HDF5_FOUND) - list(APPEND HDF5_VOL_REST_INCLUDE_DIRECTORIES - ${HDF5_INCLUDE_DIRS} - ${HDF5_BINARY_DIR}/src - ${HDF5_SOURCE_DIR}/${HDF5_HL_DIR_NAME}/src - ) -else () - message(SEND_ERROR "HDF5 not found!") - #list(APPEND HDF5_VOL_REST_INCLUDE_DIRECTORIES - #${HDF5_DIR} - #${HDF5_BINARY_DIR} - #) -endif () +list(APPEND HDF5_VOL_REST_INCLUDE_DIRECTORIES + ${HDF5_INCLUDE_DIRS} + ${HDF5_BINARY_DIR}/src + ${HDF5_SOURCE_DIR}/${HDF5_HL_DIR_NAME}/src +) set(HDF5_VOL_REST_INCLUDE_DIRECTORIES ${HDF5_VOL_REST_INCLUDE_DIRECTORIES} @@ -577,7 +573,7 @@ include (${HDF5_VOL_REST_RESOURCES_DIR}/HDFCompilerFlags.cmake) #----------------------------------------------------------------------------- include_directories(${HDF5_INCLUDE_DIRS}) -set (HDF5_LIBRARIES_TO_EXPORT +list (APPEND HDF5_LIBRARIES_TO_EXPORT ${HDF5_C_HL_LIBRARIES} ) diff --git a/autogen.sh b/autogen.sh new file mode 100755 index 00000000..ec5fc1e7 --- /dev/null +++ b/autogen.sh @@ -0,0 +1,204 @@ +#!/bin/sh +# +# Copyright by The HDF Group. +# All rights reserved. +# +# This file is part of the HDF5 REST VOL connector. The full copyright +# notice, including terms governing use, modification, and redistribution, +# is contained in the COPYING file, which can be found at the root of the +# source code distribution tree. +# + +# A script to reconfigure autotools for the HDF5 REST VOL connector. +# +# IMPORTANT OS X NOTE +# +# If you are using OS X, you will probably not have the autotools +# installed, even if you have the Xcode command-line tools. +# +# The easiest way to fix this is to install everything via Homebrew: +# +# http://brew.sh/ +# +# After you install the base packages, install autoconf, automake, +# and libtool. +# +# brew install autoconf +# brew install automake +# brew install libtool +# +# This only takes a few minutes. Note that libtool and libtoolize will +# be glibtool and glibtoolize so as not to conflict with Apple's non-gnu +# tools. This autogen.sh script handles this for you. +# +# END IMPORTANT OS X NOTE +# +# If you want to use a particular version of the autotools, the paths +# to each tool can be overridden using the following environment +# variables: +# +# HDF5_ACLOCAL +# HDF5_AUTOHEADER +# HDF5_AUTOMAKE +# HDF5_AUTOCONF +# HDF5_LIBTOOL +# HDF5_M4 +# +# Note that aclocal will attempt to include libtool's share/aclocal +# directory. +# +# This script takes one potential option: +# +# -v +# +# This emits some extra information, mainly tool versions. + +echo +echo "******************************" +echo "* REST VOL autogen.sh script *" +echo "******************************" +echo + +# Default is not verbose output +verbose=false + +optspec=":hv-" +while getopts "$optspec" optchar; do + case "${optchar}" in + h) + echo "usage: $0 [OPTIONS]" + echo + echo " -h Print this help message." + echo + echo " -v Show more verbose output." + echo + echo " NOTE: Each tool can be set via an environment variable." + echo " These are documented inside this autogen.sh script." + echo + exit 0 + ;; + v) + echo "Setting verbosity: high" + echo + verbose=true + ;; + *) + if [ "$OPTERR" != 1 ] || case $optspec in :*) ;; *) false; esac; then + echo "ERROR: non-option argument: '-${OPTARG}'" >&2 + echo "Quitting" + exit 1 + fi + ;; + esac +done + +# If paths to autotools are not specified, use whatever the system +# has installed as the default. We use 'which ' to +# show exactly what's being used. 
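#
# NOTE (editor): an illustrative use of the override variables documented above; the
# /opt/autotools prefix is a placeholder, and any subset of the HDF5_* variables may
# be set on its own, e.g.:
#
#   HDF5_AUTOCONF=/opt/autotools/bin/autoconf \
#   HDF5_AUTOMAKE=/opt/autotools/bin/automake \
#   HDF5_LIBTOOL=/opt/autotools/bin/libtool \
#   HDF5_M4=/opt/autotools/bin/m4 \
#   ./autogen.sh -v
#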
+if test -z ${HDF5_AUTOCONF}; then + HDF5_AUTOCONF=$(which autoconf) +fi +if test -z ${HDF5_AUTOMAKE}; then + HDF5_AUTOMAKE=$(which automake) +fi +if test -z ${HDF5_AUTOHEADER}; then + HDF5_AUTOHEADER=$(which autoheader) +fi +if test -z ${HDF5_ACLOCAL}; then + HDF5_ACLOCAL=$(which aclocal) +fi +if test -z ${HDF5_LIBTOOL}; then + case "`uname`" in + Darwin*) + # libtool on OS-X is non-gnu + HDF5_LIBTOOL=$(which glibtool) + ;; + *) + HDF5_LIBTOOL=$(which libtool) + ;; + esac +fi +if test -z ${HDF5_M4}; then + HDF5_M4=$(which m4) +fi + + +# Make sure that these versions of the autotools are in the path +AUTOCONF_DIR=`dirname ${HDF5_AUTOCONF}` +LIBTOOL_DIR=`dirname ${HDF5_LIBTOOL}` +M4_DIR=`dirname ${HDF5_M4}` +PATH=${AUTOCONF_DIR}:${LIBTOOL_DIR}:${M4_DIR}:$PATH + +# Make libtoolize match the specified libtool +case "`uname`" in +Darwin*) + # On OS X, libtoolize could be named glibtoolize or + # libtoolize. Try the former first, then fall back + # to the latter if it's not found. + HDF5_LIBTOOLIZE="${LIBTOOL_DIR}/glibtoolize" + if [ ! -f $HDF5_LIBTOOLIZE ] ; then + HDF5_LIBTOOLIZE="${LIBTOOL_DIR}/libtoolize" + fi + ;; +*) + HDF5_LIBTOOLIZE="${LIBTOOL_DIR}/libtoolize" + ;; +esac + +# Run autotools in order +# +# When available, we use the --force option to ensure all files are +# updated. This prevents the autotools from re-running to fix dependencies +# during the 'make' step, which can be a problem if environment variables +# were set on the command line during autogen invocation. + +# LIBTOOLIZE +libtoolize_cmd="${HDF5_LIBTOOLIZE} --copy --force" +echo ${libtoolize_cmd} +if [ "$verbose" = true ] ; then + ${HDF5_LIBTOOLIZE} --version +fi +${libtoolize_cmd} || exit 1 +echo + +# ACLOCAL +if test -e "${LIBTOOL_DIR}/../share/aclocal" ; then + aclocal_include="-I ${LIBTOOL_DIR}/../share/aclocal" +fi +aclocal_cmd="${HDF5_ACLOCAL} --force -I m4 ${aclocal_include}" +echo ${aclocal_cmd} +if [ "$verbose" = true ] ; then + ${HDF5_ACLOCAL} --version +fi +${aclocal_cmd} || exit 1 +echo + +# AUTOHEADER +autoheader_cmd="${HDF5_AUTOHEADER} --force" +echo ${autoheader_cmd} +if [ "$verbose" = true ] ; then + ${HDF5_AUTOHEADER} --version +fi +${autoheader_cmd} || exit 1 +echo + +# AUTOMAKE +automake_cmd="${HDF5_AUTOMAKE} --copy --add-missing --force-missing" +echo ${automake_cmd} +if [ "$verbose" = true ] ; then + ${HDF5_AUTOMAKE} --version +fi +${automake_cmd} || exit 1 +echo + +# AUTOCONF +autoconf_cmd="${HDF5_AUTOCONF} --force" +echo ${autoconf_cmd} +if [ "$verbose" = true ] ; then + ${HDF5_AUTOCONF} --version +fi +${autoconf_cmd} || exit 1 +echo + +echo +exit 0 \ No newline at end of file diff --git a/build_vol_autotools.sh b/build_vol_autotools.sh new file mode 100755 index 00000000..8ce30bfb --- /dev/null +++ b/build_vol_autotools.sh @@ -0,0 +1,205 @@ +#!/bin/sh +# +# Copyright by The HDF Group. +# All rights reserved. +# +# This file is part of the HDF5 REST VOL connector. The full copyright +# notice, including terms governing use, modification, and redistribution, +# is contained in the COPYING file, which can be found at the root of the +# source code distribution tree. +# +# A script used to first configure and build the HDF5 source distribution +# included with the REST VOL connector source code, and then use that built +# HDF5 to build the REST VOL connector itself. 
+ +# Get the directory of the script itself +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +# Set the default install directory +INSTALL_DIR="${SCRIPT_DIR}/rest_vol_build" + +# Default name of the directory for the included HDF5 source distribution, +# as well as the default directory where it gets installed +HDF5_DIR="src/hdf5" +HDF5_INSTALL_DIR="${INSTALL_DIR}" +build_hdf5=true + +# Determine the number of processors to use when +# building in parallel with Autotools make +NPROCS=0 + +# Compiler flags for linking with cURL and YAJL +CURL_DIR="" +CURL_LINK="-lcurl" +YAJL_DIR="" +YAJL_LINK="-lyajl" + +# Compiler flag for linking with the built REST VOL +REST_VOL_LINK="-lrestvol" + +# Extra compiler options passed to the various steps, such as -Wall +COMP_OPTS="-Wall -pedantic -Wunused-macros" + +# Extra options passed to the REST VOLs configure script +RV_OPTS="" + + +echo +echo "*************************" +echo "* REST VOL build script *" +echo "*************************" +echo + +usage() +{ + echo "usage: $0 [OPTIONS]" + echo + echo " -h Print this help message." + echo + echo " -d Enable debugging output in the REST VOL." + echo + echo " -c Enable cURL debugging output in the REST VOL." + echo + echo " -m Enable memory tracking in the REST VOL." + echo + echo " -g Enable symbolic debugging of the REST VOL code." + echo + echo " -P DIR Similar to 'configure --prefix=DIR', specifies" + echo " where the REST VOL should be installed to. Default" + echo " is 'source directory/rest_vol_build'." + echo + echo " -H DIR To specify a directory where HDF5 has already" + echo " been built." + echo + echo " -C DIR To specify the top-level directory where cURL is" + echo " installed, if cURL was not installed to a system" + echo " directory." + echo + echo " -Y DIR To specify the top-level directory where YAJL is" + echo " installed, if YAJL was not installed to a system" + echo " directory." 
+ echo +} + +optspec=":hcgtdmH:C:Y:P:-" +while getopts "$optspec" optchar; do + case "${optchar}" in + h) + usage + exit 0 + ;; + d) + RV_OPTS="${RV_OPTS} --enable-build-mode=debug" + echo "Enabled connector debugging" + echo + ;; + c) + RV_OPTS="${RV_OPTS} --enable-curl-debug" + echo "Enabled cURL debugging" + echo + ;; + m) + RV_OPTS="${RV_OPTS} --enable-mem-tracking" + echo "Enabled connector memory tracking" + echo + ;; + g) + COMP_OPTS="-g ${COMP_OPTS}" + echo "Enabled symbolic debugging" + echo + ;; + P) + if [ "$HDF5_INSTALL_DIR" = "$INSTALL_DIR" ]; then + HDF5_INSTALL_DIR="$OPTARG" + echo "Set HDF5 install directory to: ${HDF5_INSTALL_DIR}" + fi + INSTALL_DIR="$OPTARG" + echo "Prefix set to: ${INSTALL_DIR}" + echo + ;; + H) + build_hdf5=false + HDF5_INSTALL_DIR="$OPTARG" + RV_OPTS="${RV_OPTS} --with-hdf5=${HDF5_INSTALL_DIR}" + echo "Set HDF5 install directory to: ${HDF5_INSTALL_DIR}" + echo + ;; + C) + CURL_DIR="$OPTARG" + CURL_LINK="-L${CURL_DIR}/lib ${CURL_LINK}" + RV_OPTS="${RV_OPTS} --with-curl=${CURL_DIR}" + COMP_OPTS="${COMP_OPTS} ${CURL_LINK}" + echo "Libcurl directory set to: ${CURL_DIR}" + echo + ;; + Y) + YAJL_DIR="$OPTARG" + YAJL_LINK="-L${YAJL_DIR}/lib ${YAJL_LINK}" + RV_OPTS="${RV_OPTS} --with-yajl=${YAJL_DIR}" + COMP_OPTS="${COMP_OPTS} ${YAJL_LINK}" + echo "Libyajl directory set to: ${YAJL_DIR}" + echo + ;; + *) + if [ "$OPTERR" != 1 ] || case $optspec in :*) ;; *) false; esac; then + echo "ERROR: non-option argument: '-${OPTARG}'" >&2 + echo + usage + echo + exit 1 + fi + ;; + esac +done + + +# Try to determine a good number of cores to use for parallelizing both builds +if [ "$NPROCS" -eq "0" ]; then + NPROCS=`getconf _NPROCESSORS_ONLN 2> /dev/null` + + # Handle FreeBSD + if [ -z "$NPROCS" ]; then + NPROCS=`getconf NPROCESSORS_ONLN 2> /dev/null` + fi +fi + +# Ensure that the HDF5 and VOL tests submodules get checked out +if [ -z "$(ls -A ${SCRIPT_DIR}/${HDF5_DIR})" ]; then + git submodule init + git submodule update +fi + +# If the user hasn't already, first build HDF5 +if [ "$build_hdf5" = true ]; then + echo "*****************" + echo "* Building HDF5 *" + echo "*****************" + echo + + cd "${SCRIPT_DIR}/${HDF5_DIR}" + + ./autogen.sh || exit 1 + + ./configure --prefix="${HDF5_INSTALL_DIR}" CFLAGS="${COMP_OPTS}" || exit 1 + + make -j${NPROCS} && make install || exit 1 +fi + + +# Once HDF5 has been built, build the REST VOL connector against HDF5. +echo "*******************************************" +echo "* Building REST VOL connector and test suite *" +echo "*******************************************" +echo + +mkdir -p "${INSTALL_DIR}" + +cd "${SCRIPT_DIR}" + +./autogen.sh || exit 1 + +./configure --prefix="${INSTALL_DIR}" ${RV_OPTS} CFLAGS="${COMP_OPTS}" || exit 1 + +make -j${NPROCS} && make install || exit 1 + +exit 0 diff --git a/build_vol_cmake.bat b/build_vol_cmake.bat new file mode 100644 index 00000000..2c58fddb --- /dev/null +++ b/build_vol_cmake.bat @@ -0,0 +1,13 @@ +rem +rem Copyright by The HDF Group. +rem All rights reserved. +rem +rem This file is part of the HDF5 REST VOL connector. The full copyright +rem notice, including terms governing use, modification, and redistribution, +rem is contained in the COPYING file, which can be found at the root of the +rem source code distribution tree. +rem +rem A script used to first configure and build the HDF5 source distribution +rem included with the REST VOL connector source code, and then use that built +rem HDF5 to build the REST VOL connector itself. 
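#
# NOTE (editor): illustrative invocations of build_vol_autotools.sh (added above), using
# only its documented options; the /opt/... prefixes are placeholders.
#
#   # build the bundled HDF5 source, then the connector, under ./rest_vol_build
#   ./build_vol_autotools.sh
#
#   # build against an existing HDF5 install, with cURL and YAJL in non-system prefixes
#   ./build_vol_autotools.sh -H /opt/hdf5 -C /opt/curl -Y /opt/yajl -P "$PWD/rest_vol_install"
#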
+ diff --git a/build_vol_cmake.sh b/build_vol_cmake.sh new file mode 100755 index 00000000..acc02abe --- /dev/null +++ b/build_vol_cmake.sh @@ -0,0 +1,227 @@ +#!/bin/sh +# +# Copyright by The HDF Group. +# All rights reserved. +# +# This file is part of the HDF5 REST VOL connector. The full copyright +# notice, including terms governing use, modification, and redistribution, +# is contained in the COPYING file, which can be found at the root of the +# source code distribution tree. +# +# A script used to first configure and build the HDF5 source distribution +# included with the REST VOL connector source code, and then use that built +# HDF5 to build the REST VOL connector itself. + +# Get the directory of the script itself +SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" + +# Set the default install directory +INSTALL_DIR="${SCRIPT_DIR}/rest_vol_build" + +# Set the default build directory +BUILD_DIR="${SCRIPT_DIR}/rest_vol_cmake_build_files" + +# By default, tell CMake to generate Unix Makefiles +CMAKE_GENERATOR="Unix Makefiles" + +# Determine the number of processors to use when +# building in parallel with Autotools make +NPROCS=0 + +# Extra compiler options passed to the various steps, such as -Wall +COMP_OPTS="-Wall -pedantic -Wunused-macros" + +# Extra options passed to the REST VOLs CMake script +CONNECTOR_DEBUG_OPT= +CURL_DEBUG_OPT= +MEM_TRACK_OPT= +THREAD_SAFE_OPT= +HDF5_INSTALL_DIR= +CURL_OPT= +YAJL_OPT= +YAJL_LIB_OPT= + +echo +echo "*************************" +echo "* REST VOL build script *" +echo "*************************" +echo + +usage() +{ + echo "usage: $0 [OPTIONS]" + echo + echo " -h Print this help message." + echo + echo " -d Enable debugging output in the REST VOL." + echo + echo " -c Enable cURL debugging output in the REST VOL." + echo + echo " -m Enable memory tracking in the REST VOL." + echo + echo " -s Enable linking to thread safe static hdf5 library." + echo + echo " -t Make use of the static YAJL library. Be aware the" + echo " library should be built with position independent" + echo " code option enabled." + echo + echo " -x Enable building of the REST VOL examples." + echo + echo " -G Specify the CMake Generator to use for the build" + echo " files created. Default is 'Unix Makefiles'." + echo + echo " -P DIR Similar to '-DCMAKE_INSTALL_PREFIX=DIR', specifies" + echo " where the REST VOL should be installed to. Default" + echo " is 'source directory/rest_vol_build'." + echo + echo " -H DIR To specify a directory where HDF5 has already" + echo " been installed." + echo + echo " -B DIR Specifies the directory that CMake should use as" + echo " the build tree location. Default is" + echo " 'source directory/rest_vol_cmake_build_files'." + echo " Note that the REST VOL does not support in-source" + echo " CMake builds." + echo + echo " -C DIR To specify the top-level directory where cURL is" + echo " installed, if cURL was not installed to a system" + echo " directory. Similar to '-DCURL_ROOT=DIR'." + echo + echo " -Y DIR To specify the top-level directory where YAJL is" + echo " installed, if YAJL was not installed to a system" + echo " directory. Similar to '-DYAJL_ROOT=DIR'." 
+ echo +} + +optspec=":hctdmstxlG:H:C:Y:B:P:-" +while getopts "$optspec" optchar; do + case "${optchar}" in + h) + usage + exit 0 + ;; + d) + CONNECTOR_DEBUG_OPT="-DHDF5_VOL_REST_ENABLE_DEBUG=ON" + echo "Enabled connector debugging" + echo + ;; + c) + CURL_DEBUG_OPT="-DHDF5_VOL_REST_ENABLE_CURL_DEBUG=ON" + echo "Enabled cURL debugging" + echo + ;; + m) + MEM_TRACK_OPT="-DHDF5_VOL_REST_ENABLE_MEM_TRACKING=ON" + echo "Enabled connector memory tracking" + echo + ;; + s) + THREAD_SAFE_OPT="-DHDF5_VOL_REST_THREAD_SAFE=ON" + echo "Enabled linking to static thread safe hdf5 library" + echo + ;; + t) YAJL_LIB_OPT="-DYAJL_USE_STATIC_LIBRARIES=ON" + echo "Use the static YAJL library." + echo + ;; + G) + CMAKE_GENERATOR="$OPTARG" + echo "CMake Generator set to: ${CMAKE_GENERATOR}" + echo + ;; + B) + BUILD_DIR="$OPTARG" + echo "Build directory set to: ${BUILD_DIR}" + echo + ;; + P) + INSTALL_DIR="$OPTARG" + echo "Prefix set to: ${INSTALL_DIR}" + echo + ;; + H) + HDF5_INSTALL_DIR="$OPTARG" + echo "Set HDF5 install directory to: $OPTARG" + echo + ;; + C) + CURL_OPT="-DCURL_ROOT=$OPTARG" + echo "CURL_ROOT set to: ${OPTARG}" + echo + ;; + Y) + YAJL_OPT="-DYAJL_ROOT=$OPTARG" + echo "YAJL_ROOT set to: ${OPTARG}" + echo + ;; + *) + if [ "$OPTERR" != 1 ] || case $optspec in :*) ;; *) false; esac; then + echo "ERROR: non-option argument: '-${OPTARG}'" >&2 + echo + usage + echo + exit 1 + fi + ;; + esac +done + + +# Try to determine a good number of cores to use for parallelizing both builds +if [ "$NPROCS" -eq "0" ]; then + NPROCS=`getconf _NPROCESSORS_ONLN 2> /dev/null` + + # Handle FreeBSD + if [ -z "$NPROCS" ]; then + NPROCS=`getconf NPROCESSORS_ONLN 2> /dev/null` + fi +fi + +# Ensure that the vol-tests submodule gets checked out +if [ -z "$(ls -A ${SCRIPT_DIR}/test/vol-tests)" ]; then + git submodule init + git submodule update +fi + +# Build the REST VOL connector against HDF5. +echo "*******************************************" +echo "* Building REST VOL connector and test suite *" +echo "*******************************************" +echo + +mkdir -p "${BUILD_DIR}" +mkdir -p "${INSTALL_DIR}" + +# Clean out the old CMake cache +rm -f "${BUILD_DIR}/CMakeCache.txt" + +cd "${BUILD_DIR}" + +CFLAGS="-D_POSIX_C_SOURCE=200809L" cmake -G "${CMAKE_GENERATOR}" "-DHDF5_ROOT=${HDF5_INSTALL_DIR}" -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" "${CURL_OPT}" "${YAJL_OPT}" "${YAJL_LIB_OPT}" "${CONNECTOR_DEBUG_OPT}" "${CURL_DEBUG_OPT}" "${MEM_TRACK_OPT}" "${THREAD_SAFE_OPT}" "${SCRIPT_DIR}" + +echo "Build files have been generated for CMake generator '${CMAKE_GENERATOR}'" + +# Build with autotools make by default +if [ "${CMAKE_GENERATOR}" = "Unix Makefiles" ]; then + make -j${NPROCS} && make install || exit 1 +fi + +echo "REST VOL built" + +# Clean out the old CMake cache +rm -f "${BUILD_DIR}/CMakeCache.txt" + +# Configure vol-tests + +mkdir -p "${BUILD_DIR}/tests/vol-tests" +cd "${BUILD_DIR}/tests/vol-tests" + +CFLAGS="-D_POSIX_C_SOURCE=200809L" cmake -G "${CMAKE_GENERATOR}" "-DHDF5_DIR=${HDF5_INSTALL_DIR}" -DCMAKE_INSTALL_PREFIX="${INSTALL_DIR}" "${CONNECTOR_DEBUG_OPT}" "${CURL_DEBUG_OPT}" "${MEM_TRACK_OPT}" "${THREAD_SAFE_OPT}" "${SCRIPT_DIR}/test/vol-tests" + +echo "Build files generated for vol-tests" + +make || exit 1 + +echo "VOL tests built" + +exit 0 diff --git a/configure.ac b/configure.ac new file mode 100644 index 00000000..4fd4f614 --- /dev/null +++ b/configure.ac @@ -0,0 +1,1184 @@ +## Process this file with autoconf to produce configure. +## +## Copyright by The HDF Group. +## All rights reserved. 
+## +## This file is part of the HDF5 REST VOL connector. The full copyright +## notice, including terms governing use, modification, and redistribution, +## is contained in the COPYING file, which can be found at the root of the +## source code distribution tree. +## + +## ---------------------------------------------------------------------- +## Initialize configure. +## +AC_REVISION($Id: configure.ac 4d173db 2018-01-29 15:18:47Z jhenderson $) +AC_PREREQ([2.69]) + +## AC_INIT takes the name of the package, the version number, and an +## email address to report bugs. AC_CONFIG_SRCDIR takes a unique file +## as its argument. +## +## NOTE: Do not forget to change the version number here when we do a +## release!!! +## +AC_INIT([REST VOL], [1.0.0], [help@hdfgroup.org]) + +AC_CONFIG_SRCDIR([src/rest_vol.c]) +AC_CONFIG_HEADERS([src/rest_vol_config.h]) + +AC_CONFIG_AUX_DIR([bin]) +AC_CONFIG_MACRO_DIR([m4]) + +## AM_INIT_AUTOMAKE takes a list of options that should be applied to +## every Makefile.am when automake is run. +AM_INIT_AUTOMAKE([foreign subdir-objects]) +m4_ifdef([AM_SILENT_RULES], [AM_SILENT_RULES([yes])]) # use silent rules where available - automake 1.11 + +## AM_MAINTAINER_MODE turns off "rebuild rules" that contain dependencies +## for Makefiles, configure, src/H5config.h, etc. If AM_MAINTAINER_MODE +## is enabled, these files will be rebuilt if out of date. This is a +## problem because if users try to build on a machine with +## the wrong versions of autoconf and automake, these files will be +## rebuilt with the wrong versions and bad things can happen. +## Also, CVS doesn't preserve dependencies between timestamps, so +## Makefiles will often think rebuilding needs to occur when it doesn't. +## +## By default, it is enabled. Users can configure with +## --disable-maintainer-mode to prevent running the autotools. +AM_MAINTAINER_MODE([disable]) + +## ---------------------------------------------------------------------- +## Set prefix default (install directory) to a directory in the build +## area. This allows multiple src-dir builds within one host. +AC_PREFIX_DEFAULT([`pwd`/rest_vol_build]) + +if test "${prefix}" = NONE; then + prefix=$ac_default_prefix +fi + +## Run post processing on files created by configure. +## rest_vol_config.h: +## Re-generate rest_vol_config.h by prepending RV_ to all macro names in +## order to avoid name conflicts between the REST VOL macro names and the +## macro names possibly generated by software that uses the REST VOL. +## src/libhdf5_vol_rest.settings: +## Remove all lines begun with "#" which are generated by CONDITIONALs of +## configure. +AC_CONFIG_COMMANDS([pubconf], [ + echo "editing src/rest_vol_config.h" + sed 's/#define /#define RV_/' < src/rest_vol_config.h |\ + sed 's/#undef /#undef RV_/' > src/pubconf + if (diff src/pubconf src/rest_vol_config.h); then + rm -f src/pubconf + echo "src/rest_vol_config.h is unchanged" + else + /bin/mv -f src/pubconf src/rest_vol_config.h + fi + echo "Post processing src/libhdf5_vol_rest.settings" + sed '/^#/d' < src/libhdf5_vol_rest.settings > libhdf5_vol_rest.settings.TMP + cp libhdf5_vol_rest.settings.TMP src/libhdf5_vol_rest.settings + rm -f libhdf5_vol_rest.settings.TMP +]) + +if test "${libdir}" = NONE; then + libdir=$ac_default_libdir +fi + +## ----------------------------------------------------------------------- +## Set lib dir suffix. 
By default, search for required libraries in 'lib' +## on 32-bit machines and 'lib64' on 64-bit machines +if test `getconf LONG_BIT` = 64; then + LIB_SUFFIX="64" +else + LIB_SUFFIX="32" +fi + +AC_SUBST([LIB_SUFFIX]) + +## ----------------------------------------------------------------------- +## Check to make sure cURL and YAJL are available +AC_SEARCH_LIBS(curl_easy_setopt, curl, [], [AC_ERROR([A working libcurl is required])]) +AC_SEARCH_LIBS(yajl_tree_get, yajl, [], [AC_ERROR([A working libyajl is required])]) + + +AC_CANONICAL_HOST +AC_SUBST([CPPFLAGS]) +AC_SUBST([AR_FLAGS]) + +## H5_CFLAGS (and company) are for CFLAGS that should be used on HDF5, but +## not exported to h5cc (or h5fc, etc.) +AC_SUBST([H5_CFLAGS]) +AC_SUBST([H5_CPPFLAGS]) +AC_SUBST([H5_LDFLAGS]) + +## RV_CFLAGS are used for building the REST VOL and the test suite +AC_SUBST([RV_CFLAGS]) +AC_SUBST([RV_CPPFLAGS]) +AC_SUBST([RV_LDFLAGS]) + +## AM_CFLAGS (and company) are for CFLAGS that should be used on HDF5, +## and WILL be exported to h5cc (or h5fc, etc) if set by configure. +AC_SUBST([AM_CFLAGS]) +AC_SUBST([AM_CPPFLAGS]) +AC_SUBST([AM_LDFLAGS]) + +## Make sure flags are initialized. +AM_CFLAGS="${AM_CFLAGS}" +AM_CPPFLAGS="${AM_CPPFLAGS}" +AM_LDFLAGS="${AM_LDFLAGS}" +CFLAGS="${CFLAGS}" +CPPFLAGS="${CPPFLAGS}" +LDFLAGS="${LDFLAGS}" +AR_FLAGS="${AR_FLAGS}" + +## Configure may need to alter any of the *FLAGS variables in order for +## various checks to work correctly. Save the user's value here so it +## can be restored once all configure checks are complete. +saved_user_CFLAGS="$CFLAGS" +saved_user_LDFLAGS="$LDFLAGS" +saved_user_CPPFLAGS="$CPPFLAGS" + +## ---------------------------------------------------------------------- +## Dump all shell variables values. +## +AC_MSG_CHECKING([shell variables initial values]) +set >&AS_MESSAGE_LOG_FD +AC_MSG_RESULT([done]) + +## ---------------------------------------------------------------------- +## Save system information for the library settings file. +## +AC_SUBST([UNAME_INFO]) +UNAME_INFO=`uname -a` + +## ---------------------------------------------------------------------- +## Some platforms have broken basename, and/or xargs programs. Check +## that it actually does what it's supposed to do. Catch this early +## since configure and scripts relies upon them heavily and there's +## no use continuing if it's broken. +## +AC_MSG_CHECKING([if basename works]) +BASENAME_TEST="`basename /foo/bar/baz/qux/basename_works`" +if test $BASENAME_TEST != "basename_works"; then + AC_MSG_ERROR([basename program doesn't work]) +else + AC_MSG_RESULT([yes]) +fi + +## xargs basename used in configure to get the CC_BASENAME value +AC_MSG_CHECKING([if xargs works]) +XARGS_TEST="`echo /foo/bar/baz/qux/xargs_works | xargs basename`" +if test $XARGS_TEST != "xargs_works"; then + AC_MSG_ERROR([xargs program doesn't work]) +else + AC_MSG_RESULT([yes]) +fi + +## ---------------------------------------------------------------------- +## Check that the cache file was build on the same host as what we're +## running on now. +## +AC_CACHE_CHECK([for cached host], [hdf5_cv_host], [hdf5_cv_host="none"]); +if test $hdf5_cv_host = "none"; then + hdf5_cv_host=$host +elif test $hdf5_cv_host != $host; then + echo "The config.cache file was generated on $hdf5_cv_host but" + echo "this is $host. Please remove that file and try again." + AC_MSG_ERROR([config.cache file is invalid]) +fi + +## ---------------------------------------------------------------------- +## Source any special files that we need. 
These files normally aren't +## present but can be used by the maintainers to fine tune things like +## turning on debug or profiling flags for the compiler. The search order +## is: +## +## CPU-VENDOR-OS +## VENDOR-OS +## CPU-OS +## CPU-VENDOR +## OS +## VENDOR +## CPU +## +## If the `OS' ends with a version number then remove it. For instance, +## `freebsd3.1' would become `freebsd' + +case $host_os in + aix*) + host_os_novers=aix + ;; + freebsd*) + host_os_novers=freebsd + ;; + solaris*) + host_os_novers=solaris + ;; + *) + host_os_novers=$host_os + ;; +esac + +host_config="none" +for f in $host_cpu-$host_vendor-$host_os \ + $host_cpu-$host_vendor-$host_os_novers \ + $host_vendor-$host_os \ + $host_vendor-$host_os_novers \ + $host_cpu-$host_os \ + $host_cpu-$host_os_novers \ + $host_cpu-$host_vendor \ + $host_os \ + $host_os_novers \ + $host_vendor \ + $host_cpu ; do + AC_MSG_CHECKING([for config $f]) + if test -f "$srcdir/config/$f"; then + host_config=$srcdir/config/$f + AC_MSG_RESULT([found]) + break + fi + AC_MSG_RESULT([no]) +done +if test "X$host_config" != "Xnone"; then + CC_BASENAME="`echo $CC | cut -f1 -d' ' | xargs basename 2>/dev/null`" + . $host_config +fi + +## Source any special site-specific file +hname="`hostname`" +while test -n "$hname"; do + file=$srcdir/config/site-specific/host-$hname + AC_MSG_CHECKING([for config $file]) + if test -f "$file"; then + . $file + AC_MSG_RESULT([found]) + break + fi + AC_MSG_RESULT([no]) + hname_tmp=$hname + hname="`echo $hname | cut -d. -f2-99`" + test "$hname_tmp" = "$hname" && break +done + + +## ---------------------------------------------------------------------- +## Determine build mode (debug, production, clean). +## This has to be done early since the build mode is referred to +## frequently. +## +AC_MSG_CHECKING([build mode]) +AC_ARG_ENABLE([build-mode], + [AS_HELP_STRING([--enable-build-mode=(debug|production)], + [Sets the build mode. Debug turns on debug + output printing. + Production turns high optimizations on. + All these settings can be overridden by using + specific configure flags. + [default=production] + ])], + [BUILD_MODE=$enableval]) + +## Set the default +## Depends on branch, set via script at branch creation time +if test "X-$BUILD_MODE" = X- ; then + BUILD_MODE=production +fi + +# Add in conditional for whether debug mode is enabled +AM_CONDITIONAL([DEBUG_ENABLED], [test "X-$BUILD_MODE" = "X-debug"]) + +## Allow this variable to be substituted in +## other files (src/libhdf5.settings.in, etc.) +AC_SUBST([BUILD_MODE]) + +case "X-$BUILD_MODE" in + X-debug) + AC_DEFINE([CONNECTOR_DEBUG], , [Define to have the REST VOL print out debugging information.]) + H5_CFLAGS="$H5_CFLAGS $DEBUG_CFLAGS" + H5_CPPFLAGS="$H5_CPPFLAGS $DEBUG_CPPFLAGS" + AC_MSG_RESULT([debug]) + ;; + X-production) + H5_CFLAGS="$H5_CFLAGS $PROD_CFLAGS" + H5_CPPFLAGS="$H5_CPPFLAGS $PROD_CPPFLAGS" + AC_MSG_RESULT([production]) + ;; + *) + AC_MSG_ERROR([Unrecognized build mode: $BUILD_MODE. Use debug or production.]) +esac + +## ---------------------------------------------------------------------- +## Some built-in configure checks can only see CFLAGS (not AM_CFLAGS), so +## we need to add this in so configure works as intended. We will need to +## reset this value at the end of configure, to preserve the user's settings. 
+CFLAGS="${AM_CFLAGS} ${CFLAGS}" +CPPFLAGS="${AM_CPPFLAGS} ${CPPFLAGS}" +LDFLAGS="${AM_LDFLAGS} ${LDFLAGS}" + +## ---------------------------------------------------------------------- +## Enable dependency tracking unless the configure options or a +## site-specific file told us not to. This prevents configure from +## silently disabling dependencies for some compilers. +## +if test -z "${enable_dependency_tracking}"; then + enable_dependency_tracking="yes" +fi + +## ---------------------------------------------------------------------- +## Check for programs. +## +AC_PROG_CC +CC_BASENAME="`echo $CC | cut -f1 -d' ' | xargs basename 2>/dev/null`" + + +## ---------------------------------------------------------------------- +## Data types and their sizes. +## +AC_TYPE_OFF_T +AC_CHECK_TYPE([size_t], [], + [AC_DEFINE_UNQUOTED([size_t], [unsigned long], + [Define to `unsigned long' if does not define.])]) +AC_CHECK_TYPE([ssize_t], [], + [AC_DEFINE_UNQUOTED([ssize_t], [long], + [Define to `long' if does not define.])]) +AC_CHECK_TYPE([ptrdiff_t], [], + [AC_DEFINE_UNQUOTED([ptrdiff_t], [long], + [Define to `long' if does not define.])]) +AC_C_BIGENDIAN + + +## ---------------------------------------------------------------------- +## Check which archiving tool to use. This needs to be done before +## the AM_PROG_LIBTOOL macro. +## +if test -z "$AR"; then + AC_CHECK_PROGS([AR], [ar xar], [:], [$PATH]) +fi +AC_SUBST([AR]) + +# Set the default ar flags to cr +# The Automake default is to use cru and the 'u' causes ar +# to emit warnings on some platforms. +AR_FLAGS=cr + + +## Export the AR macro so that it will be placed in the libtool file +## correctly. +export AR + +AC_PROG_MAKE_SET +AC_PROG_INSTALL + + +## ---------------------------------------------------------------------- +## Set up ${TR} which is used to process the package list for extra +## debugging output in the C library. + +AC_PATH_PROG([TR], [tr]) + + +## ---------------------------------------------------------------------- +## Disable shared libraries on CYGWIN. (LK - 04/16/15) +## A number of tests run by "make check" fail on CYGWIN, so for HDF5 v1.8.15 +## we will change the default for shared libraries to disabled. + + +case "`uname`" in + CYGWIN*) + enable_shared="no" + CHECK_WARN="Shared libraries are not currently supported on CYGWIN." + ;; +esac + +## ---------------------------------------------------------------------- +## Create libtool. If shared/static libraries are going to be enabled +## or disabled, it should happen before these macros. +LT_PREREQ([2.2]) + +## ---------------------------------------------------------------------- +## dlopen - This will use an improved version of libtool +## win32-dll - This will build clean dlls on win32 platforms. +LT_INIT([dlopen,win32-dll]) + +## ---------------------------------------------------------------------- +## Check if we should install only statically linked executables. +## This check needs to occur after libtool is initialized because +## we check a libtool cache value and may issue a warning based +## on its result. +AC_SUBST([STATIC_EXEC]) + +## Default is no +STATIC_EXEC=no + +AC_MSG_CHECKING([if we should install only statically linked executables]) +AC_ARG_ENABLE([static_exec], + [AS_HELP_STRING([--enable-static-exec], + [Install only statically linked executables + [default=no]])], + [STATIC_EXEC=$enableval]) + +if test "X$STATIC_EXEC" = "Xyes"; then + echo "yes" + ## Issue a warning if -static flag is not supported. 
+ if test "X$lt_cv_prog_compiler_static_works" = "Xno"; then + echo " warning: -static flag not supported on this system; executable won't statically link shared system libraries." + LT_STATIC_EXEC="" + else + LT_STATIC_EXEC="-all-static" + fi +else + echo "no" + LT_STATIC_EXEC="" +fi +AM_CONDITIONAL([USE_PLUGINS_CONDITIONAL], [test "X$LT_STATIC_EXEC" = X]) + +AC_SUBST([LT_STATIC_EXEC]) + +## Fix up the INSTALL macro if it's a relative path. We want the +## full-path to the binary instead. +case "$INSTALL" in + *install-sh*) + INSTALL='\${top_srcdir}/bin/install-sh -c' + ;; +esac + +## ---------------------------------------------------------------------- +## Some users have reported problems with libtool's use of '-Wl,-rpath' to +## link shared libraries in nondefault directories. Allow users to +## disable embedding the rpath information in the executables and to +## instead solely rely on the information in LD_LIBRARY_PATH. +AC_MSG_CHECKING([if -Wl,-rpath should be used to link shared libs in nondefault directories]) +AC_ARG_ENABLE([sharedlib-rpath], + [AS_HELP_STRING([--disable-sharedlib-rpath], + [Disable use of the '=Wl,-rpath' linker option])], + [RPATH=$enableval]) + +case "X-$RPATH" in + X-no) + AC_MSG_RESULT([no]) + runpath_var= + hardcode_libdir_flag_spec= + hardcode_libdir_flag_spec_ld= + hardcode_into_libs=no + ;; + X-|X-yes) + AC_MSG_RESULT([yes]) + ;; + *) + AC_MSG_RESULT([error]) + AC_MSG_ERROR([\'$enableval\' is not a valid rpath type]) + ;; +esac + +## ---------------------------------------------------------------------- +## Check for system libraries. "dl" stands for dynamically loaded library +## +AC_CHECK_LIB([m], [ceil]) +AC_CHECK_LIB([dl], [dlopen]) + +## ---------------------------------------------------------------------- +## Check for system header files. +## +AC_HEADER_STDC +AC_HEADER_TIME + +## Unix +AC_CHECK_HEADERS([sys/resource.h sys/time.h unistd.h sys/ioctl.h sys/stat.h]) +AC_CHECK_HEADERS([sys/socket.h sys/types.h sys/file.h]) +AC_CHECK_HEADERS([stddef.h setjmp.h features.h]) +AC_CHECK_HEADERS([dirent.h]) +AC_CHECK_HEADERS([stdint.h], [C9x=yes]) +AC_CHECK_HEADERS([stdbool.h]) + +## Darwin +AC_CHECK_HEADERS([mach/mach_time.h]) +## Also need to detect Darwin for pubconf +##case $host_os in +## darwin*) +## AC_DEFINE([HAVE_DARWIN], [1], [Define if Darwin or Mac OS X]) +## ;; +##esac + +## Windows +##case "`uname`" in +## CYGWIN*) +## AC_CHECK_HEADERS([io.h sys/timeb.h]) +## UNAME_CYGWIN="yes" +## ;; +## MINGW*) +## AC_CHECK_HEADERS([io.h winsock2.h sys/timeb.h]) +## AC_HAVE_LIBRARY([ws2_32]) +## ;; +## *) +## AC_CHECK_HEADERS([io.h winsock2.h sys/timeb.h]) +## ;; +##esac + +## ---------------------------------------------------------------------- +## Some platforms require that all symbols are resolved when a library +## is linked. We can use the -no-undefined flag to tell libtool that +## it will be able to build shared libraries on these architectures, +## as it will not do so by default. +## +if test "X${enable_shared}" = "Xyes"; then + AC_MSG_CHECKING([if libtool needs -no-undefined flag to build shared libraries]) + case "`uname`" in + CYGWIN*|MINGW*|AIX*) + ## Add in the -no-undefined flag to LDFLAGS for libtool. + AC_MSG_RESULT([yes]) + H5_LDFLAGS="$H5_LDFLAGS -no-undefined" + ;; + *) + ## Don't add in anything. + AC_MSG_RESULT([no]) + ;; + esac +fi + +## ---------------------------------------------------------------------- +## Add necessary defines for Linux Systems. 
+## +case "$host_cpu-$host_vendor-$host_os" in + *linux*) + ## Add POSIX support on Linux systems, so defines + ## __USE_POSIX, which is required to get the prototype for fdopen + ## defined correctly in . + ## + ## This flag was removed from h5cc as of 2009-10-17 when it was found + ## that the flag broke compiling netCDF-4 code with h5cc, but kept in + ## H5_CPPFLAGS because fdopen and HDfdopen fail without it. HDfdopen + ## is used only by H5_debug_mask which is used only when debugging in + ## H5_init_library (all in H5.c). When the flag was removed this was + ## the only compile failure noted. + ## + ## This was originally defined as _POSIX_SOURCE which was updated to + ## _POSIX_C_SOURCE=199506L to expose a greater amount of POSIX + ## functionality so clock_gettime and CLOCK_MONOTONIC are defined + ## correctly. This was later updated to 200112L so that + ## posix_memalign() is visible for the direct VFD code on Linux + ## systems. + ## + ## POSIX feature information can be found in the gcc manual at: + ## http://www.gnu.org/s/libc/manual/html_node/Feature-Test-Macros.html + H5_CPPFLAGS="-D_POSIX_C_SOURCE=200112L $H5_CPPFLAGS" + + ## Need to add this so that O_DIRECT is visible for the direct + ## VFD on Linux systems. + H5_CPPFLAGS="-D_GNU_SOURCE $H5_CPPFLAGS" + ;; +esac + +## Need to add the AM_ and H5_ into CFLAGS/CPPFLAGS to make them visible +## for configure checks. +## Note: Both will be restored by the end of configure. +CPPFLAGS="$H5_CPPFLAGS $AM_CPPFLAGS $CPPFLAGS" +CFLAGS="$H5_CFLAGS $AM_CFLAGS $CFLAGS" + +## Checkpoint the cache +AC_CACHE_SAVE + +## Posix.1g types (C9x) +cat >>confdefs.h <<\EOF +#include +EOF + +if test "X$C9x" = "Xyes"; then + cat >>confdefs.h <<\EOF +#include +EOF +fi + +cat >>confdefs.h <<\EOF +#include /*for off_t definition*/ +EOF +AC_CHECK_SIZEOF([off_t]) + +if test "X$C9x" = "Xyes"; then + cat >>confdefs.h <<\EOF +#ifdef HAVE_STDBOOL_H +#include /* for bool definition */ +#else +#define bool _Bool +#endif +EOF +AC_CHECK_SIZEOF([bool]) +fi + +## Checkpoint the cache +AC_CACHE_SAVE + +## ---------------------------------------------------------------------- +## Check for functions. +AC_CHECK_FUNCS([snprintf]) + +## ---------------------------------------------------------------------- +## Check compiler characteristics +## +AC_C_CONST + +## ---------------------------------------------------------------------- +## Check if cURL debugging output should be enabled +## +AC_MSG_CHECKING([enable cURL debugging]) +AC_ARG_ENABLE([curl-debug], + [AS_HELP_STRING([--enable-curl-debug], + [Enable cURL debugging output. + [default=no] + ])], + [CURL_DEBUG=$enableval]) + +## Set default +if test "X-$CURL_DEBUG" = X- ; then + CURL_DEBUG=no +fi + +## Allow this variable to be substituted in other files +AC_SUBST([CURL_DEBUG]) + +case "X-$CURL_DEBUG" in + X-yes) + AC_DEFINE([CURL_DEBUG], , [Define to have cURL output debugging information.]) + AC_MSG_RESULT([yes]) + ;; + X-no) + AC_MSG_RESULT([no]) + ;; + *) + AC_MSG_ERROR([Unrecognized curl debug option: $CURL_DEBUG.]) + ;; +esac + +## ---------------------------------------------------------------------- +## Check if REST VOL memory tracking should be enabled +## +AC_MSG_CHECKING([enable memory tracking]) +AC_ARG_ENABLE([mem-tracking], + [AS_HELP_STRING([--enable-mem-tracking], + [Enable REST VOL memory tracking. This helps to + find memory leaks and other memory errors within + the connector itself. If this option is enabled, + the connector will throw an error upon termination + if memory is still allocated. 
+ [default=no] + ])], + [TRACK_MEM_USAGE=$enableval]) + +## Set default +if test "X-$TRACK_MEM_USAGE" = X- ; then + TRACK_MEM_USAGE=no +fi + +## Allow this variable to be substituted in other files +AC_SUBST([TRACK_MEM_USAGE]) + +case "X-$TRACK_MEM_USAGE" in + X-yes) + AC_DEFINE([TRACK_MEM_USAGE], , [Define to have the REST VOL track memory usage.]) + AC_MSG_RESULT([yes]) + ;; + X-no) + AC_MSG_RESULT([no]) + ;; + *) + AC_MSG_ERROR([Unrecognized track mem usage option: $TRACK_MEM_USAGE.]) + ;; +esac + +## ---------------------------------------------------------------------- +## Check if the install directory for the included HDF5 source +## distribution has been modified +## +AC_MSG_CHECKING([HDF5 source distribution install directory]) +AC_ARG_WITH([hdf5], + [AS_HELP_STRING([--with-hdf5[=DIR]], + [To specify the location of a pre-built HDF5 + to use when building the REST VOL.])], + [RV_HDF5_INSTALL_DIR=$withval]) + +## Set default +if test "X-$RV_HDF5_INSTALL_DIR" = X- ; then + RV_HDF5_INSTALL_DIR="${prefix}" +fi + +## Allow this variable to be substituted in other files +AC_SUBST([RV_HDF5_INSTALL_DIR]) +AC_MSG_RESULT([$RV_HDF5_INSTALL_DIR]) + +## ---------------------------------------------------------------------- +## Check if the install directory for cURL should be modified +## +AC_MSG_CHECKING([cURL install directory]) +AC_ARG_WITH([curl], + [AS_HELP_STRING([--with-curl[=DIR]], + [To specify the top-level directory where cURL is + installed, if cURL was not installed to a system + path.])], + [RV_CURL_INSTALL_DIR=$withval]) + +## Set default +if test "X-$RV_CURL_INSTALL_DIR" = X- ; then + RV_CURL_INSTALL_DIR="" +fi + +AM_CONDITIONAL([CURL_DEFAULT_DIR], [test -z "$RV_CURL_INSTALL_DIR"]) + +## Allow this variable to be substituted in other files +AC_SUBST([RV_CURL_INSTALL_DIR]) +AC_MSG_RESULT([$RV_CURL_INSTALL_DIR]) + +## ---------------------------------------------------------------------- +## Check if the install directory for YAJL should be modified +## +AC_MSG_CHECKING([YAJL install directory]) +AC_ARG_WITH([yajl], + [AS_HELP_STRING([--with-yajl[=DIR]], + [To specify the top-level directory where YAJL is + installed, if YAJL was not installed to a system + path.])], + [RV_YAJL_INSTALL_DIR=$withval]) + +## Set default +if test "X-$RV_YAJL_INSTALL_DIR" = X- ; then + RV_YAJL_INSTALL_DIR="" +fi + +AM_CONDITIONAL([YAJL_DEFAULT_DIR], [test -z "$RV_YAJL_INSTALL_DIR"]) + +## Allow this variable to be substituted in other files +AC_SUBST([RV_YAJL_INSTALL_DIR]) +AC_MSG_RESULT([$RV_YAJL_INSTALL_DIR]) + +## ---------------------------------------------------------------------- +## Check if the REST VOL tests should be built +## +AC_MSG_CHECKING([enable tests]) +AC_ARG_ENABLE([tests], + [AS_HELP_STRING([--enable-tests=(yes|no|)], + [Enable building of the REST VOL HDF5 tests. + [default=yes] + ])], + [BUILD_TESTS=$enableval]) + +## Set default +if test "X-$BUILD_TESTS" = X- ; then + BUILD_TESTS=yes +fi + +## Allow this variable to be substituted in other files +AC_SUBST([BUILD_TESTS]) + +AM_CONDITIONAL([BUILD_TESTS], [test "$BUILD_TESTS" = yes]) + +case "X-$BUILD_TESTS" in + X-yes) + AC_MSG_RESULT([yes]) + ;; + X-no) + AC_MSG_RESULT([no]) + ;; + *) + AC_MSG_RESULT([$BUILD_TESTS]) + ;; +esac + +## ---------------------------------------------------------------------- +## Check if the REST VOL examples should be built +## +AC_MSG_CHECKING([enable examples]) +AC_ARG_ENABLE([examples], + [AS_HELP_STRING([--enable-examples=(yes|no|)], + [Enable building of the REST VOL HDF5 examples. 
+ [default=yes] + ])], + [BUILD_EXAMPLES=$enableval]) + +## Set default +if test "X-$BUILD_EXAMPLES" = X- ; then + BUILD_EXAMPLES=yes +fi + +## Allow this variable to be substituted in other files +AC_SUBST([BUILD_EXAMPLES]) + +AM_CONDITIONAL([BUILD_EXAMPLES], [test "$BUILD_EXAMPLES" = yes]) + +case "X-$BUILD_EXAMPLES" in + X-yes) + AC_MSG_RESULT([yes]) + ;; + X-no) + AC_MSG_RESULT([no]) + ;; + *) + AC_MSG_RESULT([$BUILD_EXAMPLES]) + ;; +esac + +## ---------------------------------------------------------------------- +## Check if the HDF5 tools should be built with REST VOL support +## +AC_MSG_CHECKING([enable tools]) +AC_ARG_ENABLE([tools], + [AS_HELP_STRING([--enable-tools=(yes|no|)], + [Enable building of the HDF5 tools with REST VOL support. (highly experimental) + [default=no] + ])], + [BUILD_TOOLS=$enableval]) + +## Set default +if test "X-$BUILD_TOOLS" = X- ; then + BUILD_TOOLS=no +fi + +## Allow this variable to be substituted in other files +AC_SUBST([BUILD_TOOLS]) + +AM_CONDITIONAL([BUILD_TOOLS], [test "$BUILD_TOOLS" = yes]) + +case "X-$BUILD_TOOLS" in + X-yes) + AC_MSG_RESULT([yes]) + ;; + X-no) + AC_MSG_RESULT([no]) + ;; + *) + AC_MSG_RESULT([$BUILD_TOOLS]) + ;; +esac + +## ---------------------------------------------------------------------- +## Check if the compiler should include symbols +## +AC_MSG_CHECKING([enable debugging symbols]) +AC_ARG_ENABLE([symbols], + [AS_HELP_STRING([--enable-symbols=(yes|no|)], + [Add debug symbols to the library (e.g.: build with -g). + This is independent of the build mode and optimization + level. The custom string allows special settings like + -ggdb, etc. to be used. + [default=yes if debug build, otherwise no] + ])], + [SYMBOLS=$enableval]) + +## Set default +if test "X-$SYMBOLS" = X- ; then + if test "X-$BUILD_MODE" = "X-debug" ; then + SYMBOLS=yes + else + SYMBOLS=no + fi +fi + +## Allow this variable to be substituted in other files +AC_SUBST([SYMBOLS]) + +case "X-$SYMBOLS" in + X-yes) + H5_CFLAGS="$H5_CFLAGS $SYMBOLS_CFLAGS" + AC_MSG_RESULT([yes]) + ;; + X-no) + H5_CFLAGS="$H5_CFLAGS $NO_SYMBOLS_CFLAGS" + AC_MSG_RESULT([no]) + ;; + *) + H5_CFLAGS="$H5_CFLAGS $SYMBOLS" + SYMBOLS="custom ($SYMBOLS)" + AC_MSG_RESULT([$SYMBOLS]) + ;; +esac + +## ---------------------------------------------------------------------- +## Check if the assert macro should be enabled +## +AC_MSG_CHECKING([enable asserts]) +AC_ARG_ENABLE([asserts], + [AS_HELP_STRING([--enable-asserts], + [Determines whether NDEBUG is defined or not, which + controls assertions. + This is independent of the build mode and presence + of debugging symbols. + [default=yes if debug build, otherwise no] + ])], + [ASSERTS=$enableval]) + +## Set default +if test "X-$ASSERTS" = X- ; then + if test "X-$BUILD_MODE" = "X-debug" ; then + ASSERTS=yes + else + ASSERTS=no + fi +fi + +## Allow this variable to be substituted in other files +AC_SUBST([ASSERTS]) + +case "X-$ASSERTS" in + X-yes) + H5_CPPFLAGS="$H5_CPPFLAGS -UNDEBUG" + AC_MSG_RESULT([yes]) + ;; + X-no) + H5_CPPFLAGS="$H5_CPPFLAGS -DNDEBUG" + AC_MSG_RESULT([no]) + ;; + *) + AC_MSG_ERROR([Unrecognized value: $ASSERTS]) + ;; +esac + +## ---------------------------------------------------------------------- +## Check if the compiler should use profiling flags/settings +## +AC_MSG_CHECKING([profiling]) +AC_ARG_ENABLE([profiling], + [AS_HELP_STRING([--enable-profiling=(yes|no|)], + [Enable profiling flags (e.g.: -pg). + This can be set independently from the build mode. 
+ The custom setting can be used to pass alternative + profiling flags (e.g.: -P for using Prof with gcc). + [default=no] + ])], + [PROFILING=$enableval]) + +## Default is no profiling +if test "X-$PROFILING" = X- ; then + PROFILING=no +fi + +## Allow this variable to be substituted in other files +AC_SUBST([PROFILING]) + +case "X-$PROFILING" in + X-yes) + H5_CFLAGS="$H5_CFLAGS $PROFILE_CFLAGS" + AC_MSG_RESULT([yes]) + ;; + X-no) + AC_MSG_RESULT([no]) + ;; + *) + H5_CFLAGS="$H5_CFLAGS $PROFILING" + PROFILING="custom ($PROFILING)" + AC_MSG_RESULT([$PROFILING]) + ;; +esac + +## ---------------------------------------------------------------------- +## Check if the compiler should use a particular optimization setting +## +AC_MSG_CHECKING([optimization level]) +AC_ARG_ENABLE([optimization], + [AS_HELP_STRING([--enable-optimization=(high|debug|none|)], + [Enable optimization flags/settings (e.g.: -O3). + This can be set independently from the build mode. + Optimizations for a given compiler can be specified + at several levels: High, with aggressive optimizations + turned on; debug, with optimizations that are + unlikely to interfere with debugging or profiling; + and none, with no optimizations at all. + See the compiler-specific config/*-flags file for more + details. + Alternatively, optimization options can + be specified directly by specifying them as a + string value. These custom optimization flags will + completely replace all other optimization flags. + [default depends on build mode: debug=debug, + production=high, clean=none] + ])], + [OPTIMIZATION=$enableval]) + +## Set the default optimization level. This depends on the compiler mode. +if test "X-$OPTIMIZATION" = X- ; then + case "X-$BUILD_MODE" in + X-debug) + OPTIMIZATION=debug + ;; + X-production) + OPTIMIZATION=high + ;; + X-clean) + OPTIMIZATION=none + ;; + esac +fi + +## Allow this variable to be substituted in other files +AC_SUBST([OPTIMIZATION]) + +case "X-$OPTIMIZATION" in + X-high) + H5_CFLAGS="$H5_CFLAGS $HIGH_OPT_CFLAGS" + AC_MSG_RESULT([high]) + ;; + X-debug) + H5_CFLAGS="$H5_CFLAGS $DEBUG_OPT_CFLAGS" + AC_MSG_RESULT([debug]) + ;; + X-none) + H5_CFLAGS="$H5_CFLAGS $NO_OPT_CFLAGS" + AC_MSG_RESULT([none]) + ;; + *) + H5_CFLAGS="$H5_CFLAGS $OPTIMIZATION" + OPTIMIZATION="custom ($OPTIMIZATION)" + AC_MSG_RESULT([$OPTIMIZATION]) + ;; +esac + +## Checkpoint the cache +AC_CACHE_SAVE + +## ---------------------------------------------------------------------- +## Set some variables for general configuration information to be saved +## and installed with the libraries (used to generate libhdf5_vol_rest.settings). +## + +## REST VOL version from the README.md file. +RV_VERSION="`cut -d' ' -f6 $(pwd)/README.md | head -3 | tail -1`" +AC_SUBST([RV_VERSION]) + +## Configuration date +AC_SUBST([CONFIG_DATE]) CONFIG_DATE="`date`" + +## User doing the configuration +AC_SUBST([CONFIG_USER]) CONFIG_USER="`whoami`@`hostname`" +if test -n "$ORGANIZATION"; then + CONFIG_USER="$CONFIG_USER at $ORGANIZATION" +fi + +## Configuration mode (production, debug, etc.) saved above. +AC_SUBST([CONFIG_MODE]) + +## Byte sex from the AC_C_BIGENDIAN macro. +AC_SUBST([BYTESEX]) +if test "X$ac_cv_c_bigendian" = "Xyes"; then + BYTESEX="big-endian" +else + BYTESEX="little-endian" +fi + + +if test "X$ac_cv_c_bigendian" = "Xyes"; then + WORDS_BIGENDIAN="yes" +else + WORDS_BIGENDIAN="no" +fi +AC_SUBST([WORDS_BIGENDIAN]) + +## Compiler with version information. This consists of the full path +## name of the compiler and the reported version number. 
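+## (For example, the resulting string might look like
+## "/usr/bin/gcc ( gcc (GCC) 12.3.0)", depending on $PATH and on the
+## $cc_version_info string gathered earlier; the exact value is illustrative.)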
+AC_SUBST([CC_VERSION]) +## Strip anything that looks like a flag off of $CC +CC_NOFLAGS=`echo $CC | sed 's/ -.*//'` + +if `echo $CC_NOFLAGS | grep ^/ >/dev/null 2>&1`; then + CC_VERSION="$CC" +else + CC_VERSION="$CC"; + for x in `echo $PATH | sed -e 's/:/ /g'`; do + if test -x $x/$CC_NOFLAGS; then + CC_VERSION="$x/$CC" + break + fi + done +fi +if test -n "$cc_version_info"; then + CC_VERSION="$CC_VERSION ( $cc_version_info)" +fi + +## ---------------------------------------------------------------------- +## Where is the root of the source tree. Give an absolute address so +## we can find it no matter which directory of the distribution is our +## current directory. The built-in pwd fails on some systems, but the +## /bin/pwd version works OK. +## +if test -x /bin/pwd; then + pwd=/bin/pwd +else + pwd=pwd +fi +AC_SUBST([ROOT]) ROOT="`$pwd`" + + +## ---------------------------------------------------------------------- +## Restore user's CFLAGS. +CFLAGS="$saved_user_CFLAGS" +CPPFLAGS="$saved_user_CPPFLAGS" +LDFLAGS="$saved_user_LDFLAGS" + +## ---------------------------------------------------------------------- +## Build the Makefiles. +## + +## The directory search list +AC_SUBST([SEARCH]) SEARCH='$(srcdir) $(top_builddir)/src $(top_srcdir)/src' +export SEARCH + +## Some cleanup stuff +rm -f conftest conftest.o conftest.c dummy.o *.mod + +## Build config.status, touch the stamp files, and build all the Makefiles. +## The order is such that the first `make' does not need to update any +## configuration information. See config/commence.in for the order in which +## things need to be done. + +## First the stamp1 file for rest_vol_config.h.in +mkdir ./config >/dev/null 2>&1 +touch ./config/stamp1 + +## Then the config.status file (but not makefiles) +saved_no_create=$no_create +no_create=yes + +LT_OUTPUT +no_create=$saved_no_create + +## Then the stamp2 file for rest_vol_config.h +touch ./config/stamp2 + +## Finally the makefiles +test "$no_create" = yes || ${CONFIG_SHELL-/bin/sh} $CONFIG_STATUS || exit 1 + +## Are we compiling static libraries, shared libraries, or both? This +## is only used for the libhdf5_vol_rest.settings file. We can't just look at +## $enable_static and $enable_shared because if they're yes the ltconfig +## might have decided that one or the other is simply not possible. +## Therefore we have to ask the generated `libtool' shell script +## which 'features' it has enabled. +if (./libtool --features | grep '^enable shared libraries' > /dev/null); then + enable_shared=yes +else + enable_shared=no +fi + +if (./libtool --features | grep '^enable static libraries' > /dev/null); then + enable_static=yes +else + enable_static=no +fi + +## Expose things for *.in markup +AC_SUBST([STATIC_SHARED]) +AC_SUBST([enable_shared]) +AC_SUBST([enable_static]) + +## ---------------------------------------------------------------------- +## Set a macro if shared library is enabled. 
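+## (Illustrative note: Makefile.am fragments can then guard shared-only rules
+## with "if HAVE_SHARED_CONDITIONAL ... endif".)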
+## +AM_CONDITIONAL([HAVE_SHARED_CONDITIONAL], [test "X$enable_shared" = "Xyes"]) + +AC_CONFIG_FILES([src/libhdf5_vol_rest.settings + Makefile + src/Makefile + test/Makefile + examples/Makefile + examples/run-c-ex.sh]) + +AC_OUTPUT + +## show the configure settings +cat src/libhdf5_vol_rest.settings diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 243357e4..db2c1616 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -107,28 +107,6 @@ if (BUILD_SHARED_LIBS) #add_dependencies(${HDF5_VOL_REST_LIBSH_TARGET} ${HDF5_LIBRARIES_TO_EXPORT}) endif () -#----------------------------------------------------------------------------- -# Make the REST VOL build depend on H5pubconf.h existing -#----------------------------------------------------------------------------- -if (HDF5_FOUND) - add_custom_target( - rest_vol_pubconf_depend - DEPENDS ${HDF5_BINARY_DIR}/src/H5pubconf.h - ) -else () - add_custom_target( - rest_vol_pubconf_depend - DEPENDS ${CMAKE_BINARY_DIR}/${HDF5_DIR_NAME}/src/H5pubconf.h - ) -endif () - -if (BUILD_STATIC_LIBS) - add_dependencies(${HDF5_VOL_REST_LIB_TARGET} rest_vol_pubconf_depend) -endif () -if (BUILD_SHARED_LIBS) - add_dependencies(${HDF5_VOL_REST_LIBSH_TARGET} rest_vol_pubconf_depend) -endif () - #----------------------------------------------------------------------------- # Add file(s) to CMake Install #----------------------------------------------------------------------------- diff --git a/test/vol-tests b/test/vol-tests new file mode 160000 index 00000000..d583d2a4 --- /dev/null +++ b/test/vol-tests @@ -0,0 +1 @@ +Subproject commit d583d2a44e634d2db429f99f738fa979966fab66
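
Example usage (illustrative only, not part of the patch; the install prefixes and
dependency locations are placeholders): with the Autotools options added above, an
out-of-tree build against a pre-installed HDF5 could be configured roughly as:

    ./configure --prefix=$PWD/rest_vol_install \
                --with-hdf5=$PWD/hdf5install \
                --with-curl=/usr \
                --with-yajl=/usr \
                --enable-tests --enable-examples \
                --enable-mem-tracking
    make && make install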