diff --git a/README.md b/README.md index b43ec46ad..99ee76e6f 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ [![Build Status](https://github.com/NVIDIA/cuopt/actions/workflows/build.yaml/badge.svg)](https://github.com/NVIDIA/cuopt/actions/workflows/build.yaml) -NVIDIA® cuOpt™ is a GPU-accelerated optimization engine that excels in mixed integer linear programming (MILP), linear programming (LP), and vehicle routing problems (VRP). It enables near real-time solutions for large-scale challenges with millions of variables and constraints, offering +NVIDIA® cuOpt™ is a GPU-accelerated optimization engine that excels in mixed integer linear programming (MILP), linear programming (LP), and vehicle routing problems (VRP). It enables near real-time solutions for large-scale challenges with millions of variables and constraints, offering easy integration into existing solvers and seamless deployment across hybrid and multi-cloud environments. The core engine is written in C++ and wrapped with a C API, Python API and Server API. @@ -26,7 +26,7 @@ cuOpt supports the following APIs: - Linear Programming (LP) - Mixed Integer Linear Programming (MILP) - Routing (TSP, VRP, and PDP) - + This repo is also hosted as a [COIN-OR](http://github.com/coin-or/cuopt/) project. ## Installation @@ -39,7 +39,7 @@ This repo is also hosted as a [COIN-OR](http://github.com/coin-or/cuopt/) projec ### Python requirements -* Python >=3.10.x, <= 3.12.x +* Python >=3.10, <=3.13 ### OS requirements @@ -69,27 +69,23 @@ Development wheels are available as nightlies, please update `--extra-index-url` ### Conda -cuOpt can be installed with conda (via [miniforge](https://github.com/conda-forge/miniforge)) from the `nvidia` channel: - -All other dependencies are installed automatically when cuopt-server and cuopt-sh-client are installed. 
+cuOpt can be installed with conda (via [miniforge](https://github.com/conda-forge/miniforge)): -Users who are used to conda env based workflows would benefit with conda packages readily available for cuOpt. +All other dependencies are installed automatically when `cuopt-server` and `cuopt-sh-client` are installed. -For CUDA 12.x: ```bash -conda install -c rapidsai -c conda-forge -c nvidia \ - cuopt-server=25.08.* cuopt-sh-client=25.08.* python=3.12 cuda-version=12.8 +conda install -c rapidsai -c conda-forge -c nvidia cuopt-server=25.08.* cuopt-sh-client=25.08.* ``` -We also provide [nightly Conda packages](https://anaconda.org/rapidsai-nightly) built from the HEAD +We also provide [nightly conda packages](https://anaconda.org/rapidsai-nightly) built from the HEAD of our latest development branch. Just replace `-c rapidsai` with `-c rapidsai-nightly`. -### Container +### Container Users can pull the cuOpt container from the NVIDIA container registry. ```bash -docker pull nvidia/cuopt:latest-cuda12.8-py312 +docker pull nvidia/cuopt:latest-cuda12.8-py312 ``` Note: The ``latest`` tag is the latest stable release of cuOpt. If you want to use a specific version, you can use the ``-cuda12.8-py312`` tag. For example, to use cuOpt 25.5.0, you can use the ``25.5.0-cuda12.8-py312`` tag. Please refer to `cuOpt dockerhub page `_ for the list of available tags. 
diff --git a/conda/environments/all_cuda-128_arch-aarch64.yaml b/conda/environments/all_cuda-128_arch-aarch64.yaml index 5a43f25cf..55d283e0b 100644 --- a/conda/environments/all_cuda-128_arch-aarch64.yaml +++ b/conda/environments/all_cuda-128_arch-aarch64.yaml @@ -4,8 +4,6 @@ channels: - rapidsai - rapidsai-nightly - conda-forge -- nvidia -- nvidia/label/cuda-12.4.0 dependencies: - breathe - c-compiler @@ -50,11 +48,11 @@ dependencies: - myst-parser - ninja - notebook -- numba-cuda>=0.11.0,<0.12.0a0 -- numba>=0.59.1,<0.62.0a0 +- numba-cuda>=0.14.0 +- numba>=0.59.1 - numpy>=1.23.5,<3.0a0 - numpydoc -- pandas>=2.0,<2.2.3dev0 +- pandas>=2.0 - pexpect - pip - polyline diff --git a/conda/environments/all_cuda-128_arch-x86_64.yaml b/conda/environments/all_cuda-128_arch-x86_64.yaml index 4d9a29f75..30b6c1718 100644 --- a/conda/environments/all_cuda-128_arch-x86_64.yaml +++ b/conda/environments/all_cuda-128_arch-x86_64.yaml @@ -4,8 +4,6 @@ channels: - rapidsai - rapidsai-nightly - conda-forge -- nvidia -- nvidia/label/cuda-12.4.0 dependencies: - breathe - c-compiler @@ -50,11 +48,11 @@ dependencies: - myst-parser - ninja - notebook -- numba-cuda>=0.11.0,<0.12.0a0 -- numba>=0.59.1,<0.62.0a0 +- numba-cuda>=0.14.0 +- numba>=0.59.1 - numpy>=1.23.5,<3.0a0 - numpydoc -- pandas>=2.0,<2.2.3dev0 +- pandas>=2.0 - pexpect - pip - polyline diff --git a/conda/recipes/cuopt/recipe.yaml b/conda/recipes/cuopt/recipe.yaml index b716c3f79..b6d4b74f9 100644 --- a/conda/recipes/cuopt/recipe.yaml +++ b/conda/recipes/cuopt/recipe.yaml @@ -81,10 +81,10 @@ requirements: - cuvs =${{ dep_minor_version }} - h5py - libcuopt =${{ version }} - - numba >=0.59.1,<0.62.0a0 - - numba-cuda >=0.11.0,<0.12.0a0 + - numba >=0.59.1 + - numba-cuda >=0.11.0 - numpy >=1.23,<3.0a0 - - pandas>=2 + - pandas >=2.0 - pylibraft =${{ dep_minor_version }} - python - raft-dask =${{ dep_minor_version }} diff --git a/dependencies.yaml b/dependencies.yaml index cdf497bc1..aded6611f 100644 --- a/dependencies.yaml +++ 
b/dependencies.yaml @@ -249,8 +249,6 @@ channels: - rapidsai - rapidsai-nightly - conda-forge - - nvidia - - nvidia/label/cuda-12.4.0 dependencies: rapids_build_backend: common: @@ -358,10 +356,10 @@ dependencies: common: - output_types: [conda, requirements, pyproject] packages: - - numba-cuda>=0.11.0,<0.12.0a0 - - numba>=0.59.1,<0.62.0a0 + - numba-cuda>=0.14.0 + - numba>=0.59.1 - rapids-dask-dependency==25.8.*,>=0.0.0a0 - - &pandas pandas>=2.0,<2.2.3dev0 + - &pandas pandas>=2.0 - output_types: requirements packages: # pip recognizes the index as a global option for the requirements.txt file diff --git a/docs/cuopt/source/cuopt-c/quick-start.rst b/docs/cuopt/source/cuopt-c/quick-start.rst index 690903b03..bb262bf4e 100644 --- a/docs/cuopt/source/cuopt-c/quick-start.rst +++ b/docs/cuopt/source/cuopt-c/quick-start.rst @@ -28,14 +28,11 @@ Conda NVIDIA cuOpt can be installed with Conda (via `miniforge `_) from the ``nvidia`` channel: -For CUDA 12.x: - .. code-block:: bash - + # This is a deprecated module and no longer used, but it shares the same name for the CLI, so we need to uninstall it first if it exists. conda remove cuopt-thin-client - conda install -c rapidsai -c conda-forge -c nvidia \ - libcuopt=25.08.* python=3.12 cuda-version=12.8 + conda install -c rapidsai -c conda-forge -c nvidia libcuopt=25.08.* -Please visit examples under each section to learn how to use the cuOpt C API. \ No newline at end of file +Please visit examples under each section to learn how to use the cuOpt C API. diff --git a/docs/cuopt/source/cuopt-python/quick-start.rst b/docs/cuopt/source/cuopt-python/quick-start.rst index 42e26b9fa..b4d001c9f 100644 --- a/docs/cuopt/source/cuopt-python/quick-start.rst +++ b/docs/cuopt/source/cuopt-python/quick-start.rst @@ -18,20 +18,17 @@ For CUDA 12.x: .. note:: - For development wheels which are available as nightlies, please update `--extra-index-url` to `https://pypi.anaconda.org/rapidsai-wheels-nightly/simple/`. 
+   For development wheels which are available as nightlies, please update `--extra-index-url` to `https://pypi.anaconda.org/rapidsai-wheels-nightly/simple/`. Conda ----- -NVIDIA cuOpt can be installed with Conda (via `miniforge `_) from the ``nvidia`` channel: - -For CUDA 12.x: +NVIDIA cuOpt can be installed with Conda (via `miniforge `_) from the ``nvidia`` channel: .. code-block:: bash - conda install -c rapidsai -c conda-forge -c nvidia \ - cuopt=25.08.* python=3.12 cuda-version=12.8 + conda install -c rapidsai -c conda-forge -c nvidia cuopt=25.08.* .. note:: For development conda packages which are available as nightlies, please update `-c rapidsai` to `-c rapidsai-nightly`. @@ -47,13 +44,13 @@ NVIDIA cuOpt is also available as a container from Docker Hub: docker pull nvidia/cuopt:latest-cuda12.8-py312 .. note:: - The ``latest`` tag is the latest stable release of cuOpt. If you want to use a specific version, you can use the ``-cuda12.8-py312`` tag. For example, to use cuOpt 25.5.0, you can use the ``25.5.0-cuda12.8-py312`` tag. Please refer to `cuOpt dockerhub page `_ for the list of available tags. + The ``latest`` tag is the latest stable release of cuOpt. If you want to use a specific version, you can use the ``-cuda12.8-py312`` tag. For example, to use cuOpt 25.5.0, you can use the ``25.5.0-cuda12.8-py312`` tag. Please refer to `cuOpt dockerhub page `_ for the list of available tags. The container includes both the Python API and self-hosted server components. To run the container: .. code-block:: bash - docker run --gpus all -it --rm nvidia/cuopt:latest-cuda12.8-py312 + docker run --gpus all -it --rm nvidia/cuopt:latest-cuda12.8-py312 This will start an interactive session with cuOpt pre-installed and ready to use. @@ -93,7 +90,7 @@ After installation, you can verify that NVIDIA cuOpt is working correctly by run Example Response: ..
code-block:: text + route arrival_stamp truck_id location type 0 0.0 0 0 Depot 2 2.0 0 2 Delivery diff --git a/docs/cuopt/source/cuopt-server/quick-start.rst b/docs/cuopt/source/cuopt-server/quick-start.rst index 4189f4653..1a09b2be9 100644 --- a/docs/cuopt/source/cuopt-server/quick-start.rst +++ b/docs/cuopt/source/cuopt-server/quick-start.rst @@ -20,14 +20,11 @@ For CUDA 12.x: Conda ----- -cuOpt Server can be installed with Conda (via `miniforge `_) from the ``nvidia`` channel: - -For CUDA 12.x: +cuOpt Server can be installed with Conda (via `miniforge `_) from the ``nvidia`` channel: .. code-block:: bash - conda install -c rapidsai -c conda-forge -c nvidia \ - cuopt-server=25.08.* cuopt-sh-client=25.08.* python=3.12 cuda-version=12.8 + conda install -c rapidsai -c conda-forge -c nvidia cuopt-server=25.08.* cuopt-sh-client=25.08.* .. note:: For development conda packages which are available as nightlies, please update `-c rapidsai` to `-c rapidsai-nightly`. @@ -43,7 +40,7 @@ NVIDIA cuOpt is also available as a container from Docker Hub: docker pull nvidia/cuopt:latest-cuda12.8-py312 .. note:: - The ``latest`` tag is the latest stable release of cuOpt. If you want to use a specific version, you can use the ``-cuda12.8-py312`` tag. For example, to use cuOpt 25.5.0, you can use the ``25.5.0-cuda12.8-py312`` tag. Please refer to `cuOpt dockerhub page `_ for the list of available tags. + The ``latest`` tag is the latest stable release of cuOpt. If you want to use a specific version, you can use the ``-cuda12.8-py312`` tag. For example, to use cuOpt 25.5.0, you can use the ``25.5.0-cuda12.8-py312`` tag. Please refer to `cuOpt dockerhub page `_ for the list of available tags. The container includes both the Python API and self-hosted server components. To run the container: @@ -68,7 +65,7 @@ Step 3: Access NGC registry: Step 4: Pull the container: -* Go to the container section for cuOpt and copy the pull tag for the latest image.
+* Go to the container section for cuOpt and copy the pull tag for the latest image. * Log into the nvcr.io container registry in your cluster setup, using the NGC API key as shown below. .. code-block:: bash @@ -114,7 +111,7 @@ Install jq and curl for basic HTTP requests and parsing JSON responses sudo apt install jq curl -Run the server and test +Run the server and test .. code-block:: bash @@ -165,7 +162,7 @@ Run the server and test RESPONSE=$(curl --location "http://${SERVER_IP}:${SERVER_PORT}/cuopt/solution/${REQID}" \ --header 'Content-Type: application/json' \ --header "CLIENT-VERSION: custom") - + if echo "$RESPONSE" | jq -e 'has("response")' > /dev/null 2>&1; then echo "Got solution response:" echo "$RESPONSE" | jq '.' 2>/dev/null || echo "$RESPONSE" @@ -174,12 +171,12 @@ Run the server and test echo "Response status:" echo "$RESPONSE" | jq '.' 2>/dev/null || echo "$RESPONSE" fi - + if [ $i -eq 5 ]; then echo "Error: Timed out waiting for solution" exit 1 fi - + echo "Waiting for solution..." 
sleep 1 done @@ -206,7 +203,7 @@ Example Response: "0": { "task_id": [ "Depot", - "0", + "0", "Depot" ], "arrival_stamp": [ @@ -235,4 +232,4 @@ Example Response: "total_solve_time": 0.10999655723571777 }, "reqId": "afea72c2-6c76-45ce-bcf7-0d55049f32e4" - } + } diff --git a/python/cuopt/pyproject.toml b/python/cuopt/pyproject.toml index d839fc884..309fa478d 100644 --- a/python/cuopt/pyproject.toml +++ b/python/cuopt/pyproject.toml @@ -36,10 +36,10 @@ dependencies = [ "cupy-cuda12x", "cuvs==25.8.*,>=0.0.0a0", "libcuopt==25.8.*,>=0.0.0a0", - "numba-cuda>=0.11.0,<0.12.0a0", - "numba>=0.59.1,<0.62.0a0", + "numba-cuda>=0.14.0", + "numba>=0.59.1", "numpy>=1.23.5,<3.0a0", - "pandas>=2.0,<2.2.3dev0", + "pandas>=2.0", "pylibraft==25.8.*,>=0.0.0a0", "raft-dask==25.8.*,>=0.0.0a0", "rapids-dask-dependency==25.8.*,>=0.0.0a0", diff --git a/python/cuopt_server/pyproject.toml b/python/cuopt_server/pyproject.toml index be51b8b69..ed8f9b8f2 100644 --- a/python/cuopt_server/pyproject.toml +++ b/python/cuopt_server/pyproject.toml @@ -39,7 +39,7 @@ dependencies = [ "msgpack-numpy==0.4.8", "msgpack==1.1.0", "numpy>=1.23.5,<3.0a0", - "pandas>=2.0,<2.2.3dev0", + "pandas>=2.0", "psutil==5.9.6", "uvicorn==0.34.*", ] # This list was generated by `rapids-dependency-file-generator`. To make changes, edit ../../dependencies.yaml and run `rapids-dependency-file-generator`. diff --git a/python/libcuopt/libcuopt/tests/test_cli.sh b/python/libcuopt/libcuopt/tests/test_cli.sh index 49ccd48ce..e05d2f3e5 100644 --- a/python/libcuopt/libcuopt/tests/test_cli.sh +++ b/python/libcuopt/libcuopt/tests/test_cli.sh @@ -17,10 +17,11 @@ set -euo pipefail -# Add cuopt_cli path to PATh variable -PATH="$(pyenv root)/versions/$(pyenv version-name)/bin:$PATH" -export PATH - +# Add cuopt_cli path to PATH variable +if command -v pyenv &> /dev/null; then + PATH="$(pyenv root)/versions/$(pyenv version-name)/bin:$PATH" + export PATH +fi # Test the CLI