diff --git a/.github/push_mirror.yml b/.github/push_mirror.yml new file mode 100644 index 000000000..d1f6e8b33 --- /dev/null +++ b/.github/push_mirror.yml @@ -0,0 +1,24 @@ +#name: Push Mirror + +#on: [push, delete] + +#jobs: +# PNNL_GitLab: +# runs-on: ubuntu-22.04 +# steps: +# - uses: actions/checkout@v1 +# - uses: spyoungtech/mirror-action@master +# with: +# REMOTE: ${{ secrets.GIT_REPO_URL }} +# GIT_USERNAME: ${{ secrets.GIT_USER }} +# GIT_PASSWORD: ${{ secrets.GIT_PASSWORD }} +# GIT_PUSH_ARGS: --push-option=ci.skip --tags --force --prune +# - uses: nelonoel/branch-name@v1.0.1 +# - name: Trigger Pipeline +# run: | +# response=$(curl -X POST -F token=${{ secrets.PNNL_PIPELINE_TRIGGER_TOKEN }} -F ref=${BRANCH_NAME} https://gitlab.pnnl.gov/api/v4/projects/769/trigger/pipeline) +# exit_code=$? +# sudo apt install jq +# pipeline_id=$(echo $response | jq '.id' | sed 's/"//g') +# echo "PIPELINE_ID=${pipeline_id}" >> $GITHUB_ENV +# exit $exit_code diff --git a/.github/workflows/push_mirror.yml b/.github/workflows/push_mirror.yml deleted file mode 100644 index eda4b561b..000000000 --- a/.github/workflows/push_mirror.yml +++ /dev/null @@ -1,24 +0,0 @@ -name: Push Mirror - -on: [push, delete] - -jobs: - PNNL_GitLab: - runs-on: ubuntu-22.04 - steps: - - uses: actions/checkout@v1 - - uses: spyoungtech/mirror-action@master - with: - REMOTE: ${{ secrets.GIT_REPO_URL }} - GIT_USERNAME: ${{ secrets.GIT_USER }} - GIT_PASSWORD: ${{ secrets.GIT_PASSWORD }} - GIT_PUSH_ARGS: --push-option=ci.skip --tags --force --prune - - uses: nelonoel/branch-name@v1.0.1 - - name: Trigger Pipeline - run: | - response=$(curl -X POST -F token=${{ secrets.PNNL_PIPELINE_TRIGGER_TOKEN }} -F ref=${BRANCH_NAME} https://gitlab.pnnl.gov/api/v4/projects/769/trigger/pipeline) - exit_code=$? 
- sudo apt install jq - pipeline_id=$(echo $response | jq '.id' | sed 's/"//g') - echo "PIPELINE_ID=${pipeline_id}" >> $GITHUB_ENV - exit $exit_code diff --git a/.github/workflows/spack_build.yml b/.github/workflows/spack_build.yml index 5166aca55..fa75c4c3e 100644 --- a/.github/workflows/spack_build.yml +++ b/.github/workflows/spack_build.yml @@ -4,15 +4,14 @@ on: [push] jobs: hiop_spack_builds: - # 20.04 is a version shared by E4S cache and Spack binaries for x86_64 - runs-on: ubuntu-20.04 - container: spack/ubuntu-focal:latest + runs-on: ubuntu-22.04 + container: spack/ubuntu-jammy:latest strategy: matrix: spack_spec: - - hiop@develop+mpi~raja~shared~kron~sparse ^openmpi - - hiop@develop~mpi~raja~shared~kron~sparse - - hiop@develop~mpi+raja~shared~kron~sparse + - hiop@develop+mpi~raja~shared~kron~sparse ^openmpi ^libevent~openssl + - hiop@develop~mpi~raja~shared~kron~sparse ^libevent~openssl + - hiop@develop~mpi+raja~shared~kron~sparse ^libevent~openssl # We will need coinhsl for this, but what are the rules for using # a coinhsl tarball? 
@@ -41,13 +40,7 @@ jobs: spack env activate ./spack-env spack add $SPACK_SPEC target=x86_64 spack develop --path $(pwd) --no-clone hiop@develop - # Add E4S mirror - likely relying on spack cache but nice backup - # https://oaciss.uoregon.edu/e4s/inventory.html - # Need to add build cache before concretizing to re-use dependencies - # Using custom e4s cache due to known bug - # TODO: Update cache link after bug is resolved - spack mirror add E4S https://cache.e4s.io/23.02 - spack buildcache keys --install --trust + spack external find --all --exclude python spack concretize --reuse git config --global --add safe.directory $(pwd) spack --stacktrace install --fail-fast diff --git a/.gitlab/pnnl-ci.yml b/.gitlab/pnnl-ci.yml deleted file mode 100644 index d25387a59..000000000 --- a/.gitlab/pnnl-ci.yml +++ /dev/null @@ -1,152 +0,0 @@ -stages: - - build - -variables: - GIT_SUBMODULE_STRATEGY: recursive - -.pnnl_tags: - tags: - - k8s - - ikp - - exasgd - - deception - - marianas - -.pnnl_nohpc_tags: - # Use curl base image as ubuntu base is missing it - image: curlimages/curl - tags: - - basic - - exasgd - - ikp - - k8s - -.newell: - variables: - SLURM_Q: "newell_shared" - MY_CLUSTER: "newell" - SLURM_ARGS: --gres=gpu:1 --exclusive - -.marianas: - variables: - SLURM_Q: "dl" - MY_CLUSTER: "marianas" - SLURM_ARGS: --gres=gpu:1 --exclusive - -.incline: - variables: - SLURM_Q: "incline" - MY_CLUSTER: "incline" - SLURM_ARGS: --exclusive - -.pnnl_build: - extends: - - .pnnl_tags - variables: - CTEST_CMD: 'ctest --output-on-failure' - TIMELIMIT: '1:00:00' - stage: build - needs: [] - script: - - | - # - # NOTES: WORKDIR is on constance/marianas/newell - # ./ is only on the Kubernetes instance - # - set -xv - export WORKDIR="$HOME/gitlab/$CI_JOB_ID/" - mkdir -p "$WORKDIR" - cp -R ./* "$WORKDIR" - cd "$WORKDIR" - touch output - tail -f output & - tailpid=$! 
- - if [[ $MY_CLUSTER == "marianas" ]]; then - export SLURM_Q=`perl $WORKDIR/scripts/findIdleDLNodes.pl` - fi - - # Extra args for ctest - export CTEST_CMD=$CTEST_CMD - - sbatch -A EXASGD --exclusive -N 1 -n 8 -p $SLURM_Q -t $TIMELIMIT $SLURM_ARGS -o output -e output $WORKDIR/BUILD.sh $BUILD_SCRIPT_ARGS - res=1 - set +xv - while :; - do - if [[ "$(awk 'BEGIN{i=0}/BUILD_STATUS/{i++}END{print i}' output)" != "0" ]]; then - kill $tailpid - res=$(grep BUILD_STATUS output | tail -n 1 | cut -f2 -d':') - break - fi - sleep 10 - done - echo "finished batch job: $res" - exit $res - -# For PNNL CI -build_on_marianas: - extends: - - .pnnl_build - - .marianas - -build_on_newell: - extends: - - .pnnl_build - - .newell - -build_on_incline: - extends: - - .pnnl_build - - .incline - allow_failure: true - -pnnl_cleanup: - needs: [] - extends: - - .pnnl_tags - stage: .pre - variables: - GIT_STRATEGY: none - script: - # clears directory of files more than 2 hours/120 minutes old - - | - set -xv - export WORKDIR="$HOME/gitlab/" - find $WORKDIR -type d -mindepth 1 -mmin +120 -prune -print -exec rm -rf {} \; || true - ls -hal $WORKDIR - -.report-status: - extends: - - .pnnl_nohpc_tags - variables: - STATUS_PROJECT: LLNL/hiop - STATUS_NAME: PNNL_GitLab - GIT_STRATEGY: none - script: - - | - set -x - curl -L \ - -X POST \ - -H @${GITHUB_CURL_HEADERS} \ - https://api.github.com/repos/${STATUS_PROJECT}/statuses/${CI_COMMIT_SHA} \ - -d "{\"state\":\"${CI_JOB_NAME}\",\"target_url\":\"${CI_PIPELINE_URL}\",\"context\":\"${STATUS_NAME}\"}" - environment: - name: reporting-github - -pending: - stage: .pre - extends: - - .report-status - -success: - stage: .post - extends: - - .report-status - -failure: - stage: .post - extends: - - .report-status - rules: - - when: on_failure diff --git a/src/Interface/hiopInterface.hpp b/src/Interface/hiopInterface.hpp index 51dfc838a..f0b6ceced 100644 --- a/src/Interface/hiopInterface.hpp +++ b/src/Interface/hiopInterface.hpp @@ -465,7 +465,38 @@ class 
hiopInterfaceBase { return true; } - + + /** + * This method is used to provide a user with the full hiop iterate + * procedure. @see solution_callback() for an explanation of the parameters. + * + * @param[in] x array of (local) entries of the primal variables (managed by Umpire, see note below) + * @param[in] z_L array of (local) entries of the dual variables for lower bounds (managed by Umpire, see note below) + * @param[in] z_U array of (local) entries of the dual variables for upper bounds (managed by Umpire, see note below) + * @param[in] yc array of (local) entries of the dual variables for equality constraints (managed by Umpire, see note below) + * @param[in] yd array of (local) entries of the dual variables for inequality constraints (managed by Umpire, see note below) + * @param[in] s array of the slacks added to transfer inequalities to equalities (managed by Umpire, see note below) + * @param[in] v_L array of (local) entries of the dual variables for constraint lower bounds (managed by Umpire, see note below) + * @param[in] v_U array of (local) entries of the dual variables for constraint upper bounds (managed by Umpire, see note below) + * + * @note HiOp's option `callback_mem_space` can be used to change the memory location of array parameters managed by Umpire. + * More specifically, when `callback_mem_space` is set to `host` (and `mem_space` is `device`), HiOp transfers the + * arrays from device to host first, and then passes/returns pointers on host for the arrays managed by Umpire. These pointers + * can then be used in host memory space (without the need to rely on or use Umpire). + * + */ + virtual bool iterate_full_callback(const double* x, + const double* z_L, + const double* z_U, + const double* yc, + const double* yd, + const double* s, + const double* v_L, + const double* v_U) + { + return true; + } + /** * A wildcard function used to change the primal variables.
* diff --git a/src/Optimization/hiopNlpFormulation.cpp b/src/Optimization/hiopNlpFormulation.cpp index bb3d110fe..5204681d8 100644 --- a/src/Optimization/hiopNlpFormulation.cpp +++ b/src/Optimization/hiopNlpFormulation.cpp @@ -1213,7 +1213,7 @@ bool hiopNlpFormulation::user_callback_iterate(int iter, hiopVectorPar x_host(n_vars_, vec_distrib_, comm_); x.copy_to_vectorpar(x_host); - hiopVectorPar s_host(n_vars_, vec_distrib_, comm_); + hiopVectorPar s_host(n_cons_ineq_, vec_distrib_, comm_); s.copy_to_vectorpar(s_host); hiopVectorPar zl_host(n_vars_, vec_distrib_, comm_); @@ -1271,6 +1271,73 @@ bool hiopNlpFormulation::user_callback_iterate(int iter, return bret; } +bool hiopNlpFormulation::user_callback_full_iterate(hiopVector& x, + hiopVector& z_L, + hiopVector& z_U, + hiopVector& y_c, + hiopVector& y_d, + hiopVector& s, + hiopVector& v_L, + hiopVector& v_U) +{ + assert(x.get_size()==n_vars_); + assert(y_c.get_size() == n_cons_eq_); + assert(y_d.get_size() == n_cons_ineq_); + + bool bret{false}; + + if(options->GetString("callback_mem_space")=="host" && options->GetString("mem_space")=="device") { + +#if !defined(HIOP_USE_MPI) + int* vec_distrib_ = nullptr; + MPI_Comm comm_ = MPI_COMM_SELF; +#endif + hiopVectorPar x_host(n_vars_, vec_distrib_, comm_); + x.copy_to_vectorpar(x_host); + + hiopVectorPar zl_host(n_vars_, vec_distrib_, comm_); + z_L.copy_to_vectorpar(zl_host); + + hiopVectorPar zu_host(n_vars_, vec_distrib_, comm_); + z_U.copy_to_vectorpar(zu_host); + + hiopVectorPar yc_host(n_cons_eq_, vec_distrib_, comm_); + y_c.copy_to_vectorpar(yc_host); + + hiopVectorPar yd_host(n_cons_ineq_, vec_distrib_, comm_); + y_d.copy_to_vectorpar(yd_host); + + hiopVectorPar s_host(n_cons_ineq_, vec_distrib_, comm_); + s.copy_to_vectorpar(s_host); + + hiopVectorPar vl_host(n_cons_ineq_, vec_distrib_, comm_); + v_L.copy_to_vectorpar(vl_host); + + hiopVectorPar vu_host(n_cons_ineq_, vec_distrib_, comm_); + v_U.copy_to_vectorpar(vu_host); + + bret = 
interface_base.iterate_full_callback(x_host.local_data_const(), + zl_host.local_data_const(), + zu_host.local_data_const(), + yc_host.local_data_const(), + yd_host.local_data_const(), + s_host.local_data_const(), + vl_host.local_data_const(), + vu_host.local_data_const()); + } else { + bret = interface_base.iterate_full_callback(x.local_data_const(), + z_L.local_data_const(), + z_U.local_data_const(), + y_c.local_data_const(), + y_d.local_data_const(), + s.local_data_const(), + v_L.local_data_const(), + v_U.local_data_const()); + } + return bret; +} + + bool hiopNlpFormulation::user_force_update(int iter, double& obj_value, hiopVector& x, diff --git a/src/Optimization/hiopNlpFormulation.hpp b/src/Optimization/hiopNlpFormulation.hpp index 460f47906..ed522f4df 100644 --- a/src/Optimization/hiopNlpFormulation.hpp +++ b/src/Optimization/hiopNlpFormulation.hpp @@ -192,6 +192,16 @@ class hiopNlpFormulation double alpha_pr, int ls_trials); + virtual + bool user_callback_full_iterate(hiopVector& x, + hiopVector& z_L, + hiopVector& z_U, + hiopVector& y_c, + hiopVector& y_d, + hiopVector& s, + hiopVector& v_L, + hiopVector& v_U); + virtual bool user_force_update(int iter, double& obj_value,