diff --git a/build.sh b/build.sh index 7cb9aa82c..f12986676 100755 --- a/build.sh +++ b/build.sh @@ -358,5 +358,5 @@ if buildAll || hasArg docs; then cd "${REPODIR}"/docs/cuopt make clean - make html + make html linkcheck fi diff --git a/docs/cuopt/Makefile b/docs/cuopt/Makefile index 7102ea1d4..5dc8a9a0f 100644 --- a/docs/cuopt/Makefile +++ b/docs/cuopt/Makefile @@ -23,12 +23,22 @@ SPHINXPROJ = cuOpt SOURCEDIR = source BUILDDIR = build -# Put it first so that "make" without argument is like "make help". +# Default target: build documentation and run link check +all: html linkcheck + +# Build HTML documentation +html: + @$(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +# Check all external links in the documentation +linkcheck: + @$(SPHINXBUILD) -M linkcheck "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +# Show help help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help clean Makefile +.PHONY: all html linkcheck help clean Makefile clean: @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/cuopt/source/conf.py b/docs/cuopt/source/conf.py index 586dce5c3..ce1629351 100644 --- a/docs/cuopt/source/conf.py +++ b/docs/cuopt/source/conf.py @@ -325,6 +325,27 @@ def setup(app): } html_search = True +# Link checker settings +linkcheck_retries = 3 +linkcheck_timeout = 30 +linkcheck_workers = 5 +linkcheck_rate_limit_timeout = 60 + +# GitHub and GitLab link checker exceptions +linkcheck_ignore = [ + # GitHub (Rate Limited) + r'https://github\.com/.*', + r'https://api\.github\.com/.*', + r'https://raw\.githubusercontent\.com/.*', + r'https://gist\.github\.com/.*', + + # GitLab (Rate Limited) + r'https://gitlab\.com/.*', + r'https://api\.gitlab\.com/.*', + r'https://gitlab\.org/.*', + r'https://api\.gitlab\.org/.*', +] + def setup(app): from sphinx.application import Sphinx from typing import Any, List diff --git a/docs/cuopt/source/cuopt-c/lp-milp/lp-example.rst 
b/docs/cuopt/source/cuopt-c/lp-milp/lp-example.rst index c828cf30b..26cd5aeae 100644 --- a/docs/cuopt/source/cuopt-c/lp-milp/lp-example.rst +++ b/docs/cuopt/source/cuopt-c/lp-milp/lp-example.rst @@ -5,7 +5,7 @@ LP C API Examples Example With Data ----------------- -This example demonstrates how to use the LP solver in C. More details on the API can be found in `C API `_. +This example demonstrates how to use the LP solver in C. More details on the API can be found in :doc:`C API `. Copy the code below into a file called ``lp_example.c``: diff --git a/docs/cuopt/source/cuopt-c/lp-milp/lp-milp-c-api.rst b/docs/cuopt/source/cuopt-c/lp-milp/lp-milp-c-api.rst index 4e10c8c43..11e576300 100644 --- a/docs/cuopt/source/cuopt-c/lp-milp/lp-milp-c-api.rst +++ b/docs/cuopt/source/cuopt-c/lp-milp/lp-milp-c-api.rst @@ -48,7 +48,7 @@ Certain constants are needed to define an optimization problem. These constants Objective Sense Constants ------------------------- -These constants are used to define the objective sense in the `cuOptCreateProblem `_ and `cuOptCreateRangedProblem `_ functions. +These constants are used to define the objective sense in the :c:func:`cuOptCreateProblem` and :c:func:`cuOptCreateRangedProblem` functions. .. doxygendefine:: CUOPT_MINIMIZE .. doxygendefine:: CUOPT_MAXIMIZE @@ -56,7 +56,7 @@ These constants are used to define the objective sense in the `cuOptCreateProble Constraint Sense Constants -------------------------- -These constants are used to define the constraint sense in the `cuOptCreateProblem `_ and `cuOptCreateRangedProblem `_ functions. +These constants are used to define the constraint sense in the :c:func:`cuOptCreateProblem` and :c:func:`cuOptCreateRangedProblem` functions. .. doxygendefine:: CUOPT_LESS_THAN .. 
doxygendefine:: CUOPT_GREATER_THAN @@ -65,7 +65,7 @@ These constants are used to define the constraint sense in the `cuOptCreateProbl Variable Type Constants ----------------------- -These constants are used to define the the variable type in the `cuOptCreateProblem `_ and `cuOptCreateRangedProblem `_ functions. +These constants are used to define the variable type in the :c:func:`cuOptCreateProblem` and :c:func:`cuOptCreateRangedProblem` functions. .. doxygendefine:: CUOPT_CONTINUOUS .. doxygendefine:: CUOPT_INTEGER @@ -73,7 +73,7 @@ These constants are used to define the the variable type in the `cuOptCreateProb Infinity Constant ----------------- -This constant may be used to represent infinity in the `cuOptCreateProblem `_ and `cuOptCreateRangedProblem `_ functions. +This constant may be used to represent infinity in the :c:func:`cuOptCreateProblem` and :c:func:`cuOptCreateRangedProblem` functions. .. doxygendefine:: CUOPT_INFINITY @@ -118,7 +118,7 @@ When you are done with a solve you should destroy a `cuOptSolverSettings` object Setting Parameters ------------------ -The following functions are used to set and get parameters. You can find more details on the available parameters in the `LP/MILP settings <../../lp-milp-settings.html>`_ section. +The following functions are used to set and get parameters. You can find more details on the available parameters in the :doc:`LP/MILP settings <../../lp-milp-settings>` section. .. doxygenfunction:: cuOptSetParameter .. doxygenfunction:: cuOptGetParameter @@ -127,11 +127,12 @@ The following functions are used to set and get parameters. You can find more de .. doxygenfunction:: cuOptSetFloatParameter .. doxygenfunction:: cuOptGetFloatParameter +.. _parameter-constants: Parameter Constants ------------------- -These constants are used as the parameter name in the `cuOptSetParameter `_ , `cuOptGetParameter `_ and similar functions. 
More details on the parameters can be found in the `LP/MILP settings <../../lp-milp-settings.html>`_ section. +These constants are used as parameter names in the :c:func:`cuOptSetParameter`, :c:func:`cuOptGetParameter`, and similar functions. For more details on the available parameters, see the :doc:`LP/MILP settings <../../lp-milp-settings>` section. .. LP/MIP parameter string constants .. doxygendefine:: CUOPT_ABSOLUTE_DUAL_TOLERANCE @@ -161,20 +162,24 @@ These constants are used as the parameter name in the `cuOptSetParameter `_. +These constants are used to configure `CUOPT_PDLP_SOLVER_MODE` via :c:func:`cuOptSetIntegerParameter`. .. doxygendefine:: CUOPT_PDLP_SOLVER_MODE_STABLE1 .. doxygendefine:: CUOPT_PDLP_SOLVER_MODE_STABLE2 .. doxygendefine:: CUOPT_PDLP_SOLVER_MODE_METHODICAL1 .. doxygendefine:: CUOPT_PDLP_SOLVER_MODE_FAST1 +.. _method-constants: + Method Constants ---------------- -These constants are used to configure `CUOPT_METHOD` via `cuOptSetIntegerParameter `_. +These constants are used to configure `CUOPT_METHOD` via :c:func:`cuOptSetIntegerParameter`. .. doxygendefine:: CUOPT_METHOD_CONCURRENT .. doxygendefine:: CUOPT_METHOD_PDLP @@ -214,7 +219,7 @@ When you are finished with a `cuOptSolution` object you should destory it with Termination Status Constants ---------------------------- -These constants define the termination status received from the `cuOptGetTerminationStatus `_ function. +These constants define the termination status received from the :c:func:`cuOptGetTerminationStatus` function. .. LP/MIP termination status constants .. 
doxygendefine:: CUOPT_TERIMINATION_STATUS_NO_TERMINATION diff --git a/docs/cuopt/source/cuopt-c/lp-milp/milp-examples.rst b/docs/cuopt/source/cuopt-c/lp-milp/milp-examples.rst index 0dbfdcb71..d9025f787 100644 --- a/docs/cuopt/source/cuopt-c/lp-milp/milp-examples.rst +++ b/docs/cuopt/source/cuopt-c/lp-milp/milp-examples.rst @@ -5,7 +5,7 @@ MILP C API Examples Example With Data ----------------- -This example demonstrates how to use the MILP solver in C. More details on the API can be found in `C API `_. +This example demonstrates how to use the MILP solver in C. More details on the API can be found in :doc:`C API `. Copy the code below into a file called ``milp_example.c``: diff --git a/docs/cuopt/source/cuopt-cli/quick-start.rst b/docs/cuopt/source/cuopt-cli/quick-start.rst index d59d56836..6ef7a4882 100644 --- a/docs/cuopt/source/cuopt-cli/quick-start.rst +++ b/docs/cuopt/source/cuopt-cli/quick-start.rst @@ -2,7 +2,7 @@ Quickstart Guide ================= -cuopt_cli is built as part of the libcuopt package and you can follow these `instructions <../cuopt-c/quick-start.html>`_ to install it. +cuopt_cli is built as part of the libcuopt package and you can follow these :doc:`instructions <../cuopt-c/quick-start>` to install it. To see all available options and their descriptions: @@ -17,4 +17,4 @@ This will display the complete list of command-line arguments and their usage: :language: shell :linenos: -Please refer to `parameter settings <../lp-milp-settings.html>`_ for more details on default values and other options. \ No newline at end of file +Please refer to :doc:`../lp-milp-settings` for more details on default values and other options. 
\ No newline at end of file diff --git a/docs/cuopt/source/cuopt-python/quick-start.rst b/docs/cuopt/source/cuopt-python/quick-start.rst index b4d001c9f..e1ae23f22 100644 --- a/docs/cuopt/source/cuopt-python/quick-start.rst +++ b/docs/cuopt/source/cuopt-python/quick-start.rst @@ -24,7 +24,7 @@ For CUDA 12.x: Conda ----- -NVIDIA cuOpt can be installed with Conda (via `miniforge `_ from the ``nvidia`` channel: +NVIDIA cuOpt can be installed with Conda (via `miniforge `_) from the ``nvidia`` channel: .. code-block:: bash @@ -41,16 +41,19 @@ NVIDIA cuOpt is also available as a container from Docker Hub: .. code-block:: bash - docker pull nvidia/cuopt:latest-cuda12.8-py312 + docker pull nvidia/cuopt:latest-cuda12.8-py3.12 .. note:: - The ``latest`` tag is the latest stable release of cuOpt. If you want to use a specific version, you can use the ``-cuda12.8-py312`` tag. For example, to use cuOpt 25.5.0, you can use the ``25.5.0-cuda12.8-py312`` tag. Please refer to `cuOpt dockerhub page `_ for the list of available tags. + The ``latest`` tag is the latest stable release of cuOpt. If you want to use a specific version, you can use the ``-cuda12.8-py3.12`` tag. For example, to use cuOpt 25.5.0, you can use the ``25.5.0-cuda12.8-py3.12`` tag. Please refer to `cuOpt dockerhub page `_ for the list of available tags. + +.. note:: + The nightly version of cuOpt is available as ``[VERSION]a-cuda12.8-py3.12`` tag. For example, to use cuOpt 25.8.0a, you can use the ``25.8.0a-cuda12.8-py3.12`` tag. The container includes both the Python API and self-hosted server components. To run the container: .. code-block:: bash - docker run --gpus all -it --rm nvidia/cuopt:latest-cuda12.8-py312 + docker run --gpus all -it --rm nvidia/cuopt:latest-cuda12.8-py3.12 /bin/bash This will start an interactive session with cuOpt pre-installed and ready to use. 
diff --git a/docs/cuopt/source/cuopt-server/client-api/sh-cli-build.rst b/docs/cuopt/source/cuopt-server/client-api/sh-cli-build.rst index f1ab6d1b3..44b6a0304 100644 --- a/docs/cuopt/source/cuopt-server/client-api/sh-cli-build.rst +++ b/docs/cuopt/source/cuopt-server/client-api/sh-cli-build.rst @@ -73,9 +73,9 @@ Success Response: {"reqId":"1df28c33-8b8c-4bb7-9ff9-1e19929094c6"} -When sending files to the server, the server must be configured with appropriate data and result directories to temporarily store these files. These directories can be set using the ``-d`` and ``-r`` options when starting the server. Please refer to the `Server CLI documentation <../server-api/server-cli.html>`_ for more details on configuring these directories. +When sending files to the server, the server must be configured with appropriate data and result directories to temporarily store these files. These directories can be set using the ``-d`` and ``-r`` options when starting the server. Please refer to the :doc:`Server CLI documentation <../server-api/server-cli>` for more details on configuring these directories. -``JSON_DATA`` should follow the `spec <../../open-api.html#operation/postrequest_cuopt_request_post>`_ described for cuOpt input. +``JSON_DATA`` should follow the :doc:`spec under "POST /cuopt/request" schema <../../open-api>` described for cuOpt input. Polling for Request Status: --------------------------- @@ -92,7 +92,7 @@ Users can poll the request id for status with the help of ``/cuopt/request/{requ In case the solver has completed the job, the response will be "completed". -Please refer to the `Solver status in spec <../../open-api.html#operation/getrequest_cuopt_request__id__get>`_ for more details on responses. +Please refer to the :doc:`Solver status in spec using "GET /cuopt/request/{request-id}" <../../open-api>` for more details on responses. 
cuOpt Result Retrieval @@ -106,7 +106,7 @@ Once you have received successful response from solver with status "completed", curl --location "http://$ip:$port/cuopt/solution/{request-id}" -This would fetch the result in JSON format. Please refer to the `Response structure in spec <../../open-api.html#operation/getrequest_cuopt_solution__id__get>`_ for more details on responses. +This would fetch the result in JSON format. Please refer to the :doc:`Response structure in spec using "GET /cuopt/solution/{request-id}" <../../open-api>` for more details on responses. .. important:: diff --git a/docs/cuopt/source/cuopt-server/csp-guides/csp-aws.rst b/docs/cuopt/source/cuopt-server/csp-guides/csp-aws.rst index f88dd3b1d..b79561dc1 100644 --- a/docs/cuopt/source/cuopt-server/csp-guides/csp-aws.rst +++ b/docs/cuopt/source/cuopt-server/csp-guides/csp-aws.rst @@ -46,10 +46,10 @@ Step 1: Create an AWS VM with NVAIE Image Step 2: Activate NVAIE Subscription ------------------------------------ -Once connected to the VM, generate an identity token. Activate your NVIDIA AI Enterprise subscription using that identity token on NGC. Follow the instructions `here `__. +Once connected to the VM, generate an identity token. Activate your NVIDIA AI Enterprise subscription using that identity token on NGC. Follow the instructions `here `__. Step 3: Run cuOpt ------------------ -To run cuOpt, you will need to log in to the NVIDIA Container Registry, pull the cuOpt container, and then run it. To test that it is successfully running, you can run a sample cuOpt request. This process is the same for deploying cuOpt on your own infrastructure. Refer to `Self-Hosted Service Quickstart Guide `__. +To run cuOpt, you will need to log in to the NVIDIA Container Registry, pull the cuOpt container, and then run it. To test that it is successfully running, you can run a sample cuOpt request. This process is the same for deploying cuOpt on your own infrastructure. 
Refer to :ref:`Self-Hosted Service Quickstart Guide `. diff --git a/docs/cuopt/source/cuopt-server/csp-guides/csp-azure.rst b/docs/cuopt/source/cuopt-server/csp-guides/csp-azure.rst index d14c30c0c..af27be0ba 100644 --- a/docs/cuopt/source/cuopt-server/csp-guides/csp-azure.rst +++ b/docs/cuopt/source/cuopt-server/csp-guides/csp-azure.rst @@ -56,12 +56,12 @@ Step 1: Create an Azure VM with NVAIE Image Step 2: Activate NVAIE Subscription ------------------------------------ -Once connected to the VM, generate an identity token. Activate your NVIDIA AI Enterprise subscription using that identity token on NGC. Follow the instructions `here `__. +Once connected to the VM, generate an identity token. Activate your NVIDIA AI Enterprise subscription using that identity token on NGC. Follow the instructions `here `__. Step 3: Run cuOpt ------------------ -To run cuOpt, you will need to log in to the NVIDIA Container Registry, pull the cuOpt container, and then run it. To test that it is successfully running, you can run a sample cuOpt request. This process is the same for deploying cuOpt on your own infrastructure. Refer `Self-Hosted Service Quickstart Guide `__. +To run cuOpt, you will need to log in to the NVIDIA Container Registry, pull the cuOpt container, and then run it. To test that it is successfully running, you can run a sample cuOpt request. This process is the same for deploying cuOpt on your own infrastructure. Refer to :ref:`Self-Hosted Service Quickstart Guide `. 
Step 4: Mapping Visualization with Azure diff --git a/docs/cuopt/source/cuopt-server/examples/lp-examples.rst b/docs/cuopt/source/cuopt-server/examples/lp-examples.rst index 005a0f0c1..96d1e1c04 100644 --- a/docs/cuopt/source/cuopt-server/examples/lp-examples.rst +++ b/docs/cuopt/source/cuopt-server/examples/lp-examples.rst @@ -4,7 +4,7 @@ LP Python Examples The following example showcases how to use the ``CuOptServiceSelfHostClient`` to solve a simple LP problem in normal mode and batch mode (where multiple problems are solved at once). -The OpenAPI specification for the server is available in `open-api spec <../../open-api.html>`_. The example data is structured as per the OpenAPI specification for the server, please refer `LPData <../../open-api.html#/default/postrequest_cuopt_request_post>`_ under schema section. LP and MILP share same spec. +The OpenAPI specification for the server is available in :doc:`open-api spec <../../open-api>`. The example data is structured as per the OpenAPI specification for the server, please refer :doc:`LPData under "POST /cuopt/request" <../../open-api>` under schema section. LP and MILP share same spec. If you want to run server locally, please run the following command in a terminal or tmux session so you can test examples in another terminal. @@ -15,6 +15,8 @@ If you want to run server locally, please run the following command in a termina export port=5000 python -m cuopt_server.cuopt_service --ip $ip --port $port +.. _generic-example-with-normal-and-batch-mode: + Genric Example With Normal Mode and Batch Mode ------------------------------------------------ @@ -225,6 +227,8 @@ Batch mode response: .. note:: Warm start is only applicable to LP and not for MILP. +.. _warm-start: + Warm Start ---------- @@ -428,7 +432,7 @@ The response is: Generate Datamodel from MPS Parser ---------------------------------- -Use a datamodel generated from mps file as input; this yields a solution object in response. 
For more details please refer to `LP/MILP parameters <../../lp-milp-settings.html>`_. +Use a datamodel generated from mps file as input; this yields a solution object in response. For more details please refer to :doc:`LP/MILP parameters <../../lp-milp-settings>`. .. code-block:: python :linenos: @@ -560,13 +564,13 @@ The response would be as follows: Example with DataModel is available in the `Examples Notebooks Repository `_. -The ``data`` argument to ``get_LP_solve`` may be a dictionary of the format shown in `LP Open-API spec <../../open-api.html#operation/postrequest_cuopt_request_post>`_. More details on the response can be found under the responses schema `request and solution API spec <../../open-api.html#/default/getrequest_cuopt_request__id__get>`_. +The ``data`` argument to ``get_LP_solve`` may be a dictionary of the format shown in :doc:`LP Open-API spec <../../open-api>`. More details on the response can be found under the responses schema :doc:`"get /cuopt/request" and "get /cuopt/solution" API spec <../../open-api>`. Aborting a Running Job in Thin Client ------------------------------------- -Please refer to the `MILP Example on Aborting a Running Job in Thin Client `_ for more details. +Please refer to the :ref:`aborting-thin-client` in the MILP Example for more details. ================================================= @@ -709,7 +713,7 @@ In the case of batch mode, you can send a bunch of ``mps`` files at once, and ac Aborting a Running Job In CLI ----------------------------- -Please refer to the `MILP Example `_ for more details. +Please refer to the :ref:`aborting-cli` in the MILP Example for more details. .. note:: Please use solver settings while using .mps files. 
diff --git a/docs/cuopt/source/cuopt-server/examples/milp-examples.rst b/docs/cuopt/source/cuopt-server/examples/milp-examples.rst index 981bb3192..557e2f609 100644 --- a/docs/cuopt/source/cuopt-server/examples/milp-examples.rst +++ b/docs/cuopt/source/cuopt-server/examples/milp-examples.rst @@ -3,7 +3,7 @@ MILP Python Examples ======================================== The major difference between this example and the LP example is that some of the variables are integers, so ``variable_types`` need to be shared. -The OpenAPI specification for the server is available in `open-api spec <../../open-api.html>`_. The example data is structured as per the OpenAPI specification for the server, please refer `LPData <../../open-api.html#/default/postrequest_cuopt_request_post>`_ under schema section. LP and MILP share same spec. +The OpenAPI specification for the server is available in :doc:`open-api spec <../../open-api>`. The example data is structured as per the OpenAPI specification for the server, please refer :doc:`LPData under "POST /cuopt/request" <../../open-api>` under schema section. LP and MILP share same spec. Generic Example --------------- @@ -120,9 +120,10 @@ The response would be as follows: "reqId": "524e2e37-3494-4c16-bd06-2a9bfd768f76" } +.. _incumbent-and-logging-callback: -Incumbent Solution ------------------- +Incumbent and Logging Callback +------------------------------ The incumbent solution can be retrieved using a callback function as follows: @@ -247,10 +248,12 @@ Incumbent callback response: An example with DataModel is available in the `Examples Notebooks Repository `_. -The ``data`` argument to ``get_LP_solve`` may be a dictionary of the format shown in `MILP Open-API spec <../../open-api.html#operation/postrequest_cuopt_request_post>`_. More details on the response can be found under responses schema in `request and solution API spec <../../open-api.html#/default/getrequest_cuopt_request__id__get>`_. 
+The ``data`` argument to ``get_LP_solve`` may be a dictionary of the format shown in :doc:`MILP Open-API spec <../../open-api>`. More details on the response can be found under responses schema in :doc:`"/cuopt/request" and "/cuopt/solution" API spec <../../open-api>`. They can be of different format as well, please check the documentation. +.. _aborting-thin-client: + Aborting a Running Job in Thin Client ------------------------------------- @@ -332,6 +335,8 @@ In case the user needs to update solver settings through CLI, the option ``-ss`` .. note:: Batch mode is not supported for MILP. +.. _aborting-cli: + Aborting a Running Job In CLI ----------------------------- diff --git a/docs/cuopt/source/cuopt-server/examples/routing-examples.rst b/docs/cuopt/source/cuopt-server/examples/routing-examples.rst index 0c33c3046..21cdf10b9 100644 --- a/docs/cuopt/source/cuopt-server/examples/routing-examples.rst +++ b/docs/cuopt/source/cuopt-server/examples/routing-examples.rst @@ -4,7 +4,7 @@ Routing Python Examples The following example showcases how to use the ``CuOptServiceSelfHostClient`` to solve a simple routing problem. -The OpenAPI specification for the server is available in `open-api spec <../../open-api.html>`_. The example data is structured as per the OpenAPI specification for the server, please refer `OptimizeRoutingData <../../open-api.html#/default/postrequest_cuopt_request_post>`_ under schema section. +The OpenAPI specification for the server is available in :doc:`open-api spec <../../open-api>`. The example data is structured as per the OpenAPI specification for the server, please refer :doc:`OptimizeRoutingData under "POST /cuopt/request" <../../open-api>` under schema section. Generic Example --------------- @@ -110,6 +110,8 @@ The response would be as follows: } +.. 
_initial-solution-in-python: + Initial Solution ---------------- @@ -233,9 +235,9 @@ The response would be as follows: "reqId": "ebd378a3-c02a-47f3-b0a1-adec81be7cdd" } -The ``data`` argument to ``get_optimized_routes`` may be a dictionary of the format shown in `Get Routes Open-API spec <../../open-api.html#operation/postrequest_cuopt_request_post>`_. +The ``data`` argument to ``get_optimized_routes`` may be a dictionary of the format shown in :doc:`Get Routes Open-API spec <../../open-api>`. It may also be the path of a file containing such a dictionary as JSON or written using the Python *msgpack* module. -A JSON file may optionally be compressed with zlib. More details on the responses can be found under the responses schema under `request and solution API spec <../../open-api.html#/default/getrequest_cuopt_request__id__get>`_. +A JSON file may optionally be compressed with zlib. More details on the responses can be found under the responses schema in :doc:`"get /cuopt/request" and "get /cuopt/solution" API spec <../../open-api>`. To enable HTTPS: @@ -297,7 +299,7 @@ More examples are available in the `Examples Notebooks Repository `_ for more details. +Please refer to the :ref:`aborting-thin-client` for more details. ======================================== Routing CLI Examples @@ -323,6 +325,8 @@ Invoke the CLI. export port=5000 cuopt_sh data.json -i $ip -p $port +.. _initial-solution-in-cli: + Initial Solution in CLI ----------------------- @@ -368,7 +372,7 @@ Users can also upload a solution which might have been saved for later runs. Aborting a Running Job In CLI ----------------------------- -Please refer to the `MILP Example `_ for more details. +Please refer to the :ref:`aborting-cli` in the MILP Example for more details. .. note:: Please use solver settings while using .mps files. 
diff --git a/docs/cuopt/source/cuopt-server/quick-start.rst b/docs/cuopt/source/cuopt-server/quick-start.rst index 1a09b2be9..649754404 100644 --- a/docs/cuopt/source/cuopt-server/quick-start.rst +++ b/docs/cuopt/source/cuopt-server/quick-start.rst @@ -37,24 +37,29 @@ NVIDIA cuOpt is also available as a container from Docker Hub: .. code-block:: bash - docker pull nvidia/cuopt:latest-cuda12.8-py312 + docker pull nvidia/cuopt:latest-cuda12.8-py3.12 .. note:: - The ``latest`` tag is the latest stable release of cuOpt. If you want to use a specific version, you can use the ``-cuda12.8-py312`` tag. For example, to use cuOpt 25.5.0, you can use the ``25.5.0-cuda12.8-py312`` tag. Please refer to `cuOpt dockerhub page `_ for the list of available tags. + The ``latest`` tag is the latest stable release of cuOpt. If you want to use a specific version, you can use the ``-cuda12.8-py3.12`` tag. For example, to use cuOpt 25.5.0, you can use the ``25.5.0-cuda12.8-py3.12`` tag. Please refer to `cuOpt dockerhub page `_ for the list of available tags. The container includes both the Python API and self-hosted server components. To run the container: .. code-block:: bash - docker run --gpus all -it --rm -p 8000:8000 -e CUOPT_SERVER_PORT=8000 nvidia/cuopt:latest-cuda12.8-py312 /bin/bash -c "python3 -m cuopt_server.cuopt_service" + docker run --gpus all -it --rm -p 8000:8000 -e CUOPT_SERVER_PORT=8000 nvidia/cuopt:latest-cuda12.8-py3.12 + +.. note:: + The nightly version of cuOpt is available as ``[VERSION]a-cuda12.8-py3.12`` tag. For example, to use cuOpt 25.8.0a, you can use the ``25.8.0a-cuda12.8-py3.12`` tag. .. note:: Make sure you have the NVIDIA Container Toolkit installed on your system to enable GPU support in containers. See the `installation guide `_ for details. +.. _container-from-nvidia-ngc: + Container from NVIDIA NGC ------------------------- -Step 1: Get a subscription for `NVIDIA AI Enterprise (NVAIE) `_ to get the cuOpt container to host in your cloud. 
+Step 1: Get a subscription for `NVIDIA AI Enterprise (NVAIE) `_ to get the cuOpt container to host in your cloud. Step 2: Once given access, users can find `cuOpt container `_ in the NGC catalog. @@ -85,7 +90,7 @@ The container includes both the Python API and self-hosted server components. To .. code-block:: bash - docker run --gpus all -it --rm -p 8000:8000 -e CUOPT_SERVER_PORT=8000 /bin/bash -c "python3 -m cuopt_server.cuopt_service" + docker run --gpus all -it --rm -p 8000:8000 -e CUOPT_SERVER_PORT=8000 NVIDIA Launchable ------------------- @@ -102,7 +107,7 @@ After installation, you can verify that cuOpt Server is working correctly by run The following example is for running the server locally. If you are using the container approach, you should comment out the server start and kill commands in the script below since the server is already running in the container. The following example is testing with a simple routing problem constuctured as Json request and sent over HTTP to the server using ``curl``.This example is running server with few configuration options such as ``--ip`` and ``--port``. -Additional configuration options for server can be found in `Server CLI `_ +Additional configuration options for server can be found in :doc:`Server CLI `. Install jq and curl for basic HTTP requests and parsing JSON responses @@ -184,7 +189,7 @@ Run the server and test # Shutdown the server kill $SERVER_PID -The Open API specification for the server is available in `open-api spec <../open-api.html>`_. +The Open API specification for the server is available in :doc:`open-api spec <../open-api>`. Example Response: diff --git a/docs/cuopt/source/faq.rst b/docs/cuopt/source/faq.rst index 7a5e4666b..881a8fe15 100644 --- a/docs/cuopt/source/faq.rst +++ b/docs/cuopt/source/faq.rst @@ -45,11 +45,11 @@ General FAQ .. dropdown:: Do I need a GPU to use cuOpt? - Yes, please refer to `system requirements `_ for GPU specifications. 
You can acquire a cloud instance with a supported GPU and launch cuOpt; alternatively, you can launch it in your local machine if it meets the requirements. + Yes, please refer to :doc:`system requirements ` for GPU specifications. You can acquire a cloud instance with a supported GPU and launch cuOpt; alternatively, you can launch it in your local machine if it meets the requirements. .. dropdown:: Does cuOpt use multiple GPUs/multi-GPUs/multi GPUs? - #. Yes, in cuOpt self-hosted server, a solver process per GPU can be configured to run multiple solvers. Requests are accepted in a round-robin queue. More details are available in `server api `_. + #. Yes, in cuOpt self-hosted server, a solver process per GPU can be configured to run multiple solvers. Requests are accepted in a round-robin queue. More details are available in :doc:`server api `. #. There is no support for leveraging multiple GPUs to solve a single problem or oversubscribing a single GPU for multiple solvers. .. dropdown:: The cuOpt Service is not starting: Issue with port? @@ -249,7 +249,7 @@ Routing FAQ #. cuOpt is stateless and cannot handle dynamic constraints directly, but this can be resolved with modeling. #. Dynamic reoptimization is used when there is a change in the conditions of the operation such as a vehicle getting broken, a driver calling in sick, a road block, traffic, or a high-priority order coming in. - #. The problem is prepped in such a way that the packages that are already en route are assigned to only those vehicles, and new and old deliveries will be added to this problem. Please refer to example notebooks in `cuOpt Resources `__ to understand more about how to tackle this problem. + #. The problem is prepped in such a way that the packages that are already en route are assigned to only those vehicles, and new and old deliveries will be added to this problem. Please refer to example notebooks in :doc:`cuOpt Resources ` to understand more about how to tackle this problem. .. 
dropdown:: Does cuOpt take an initial solution? diff --git a/docs/cuopt/source/introduction.rst b/docs/cuopt/source/introduction.rst index f7358c6c2..671446727 100644 --- a/docs/cuopt/source/introduction.rst +++ b/docs/cuopt/source/introduction.rst @@ -6,7 +6,7 @@ Introduction Using accelerated computing, NVIDIA® cuOpt optimizes operations research and logistics by enabling better, faster decisions. -As part of `NVIDIA AI Enterprise `_, NVIDIA cuOpt offers a secure, efficient way to rapidly generate world-class route optimization solutions. Using a single optimized container, you can deploy the AI microservice in under 5 minutes on accelerated NVIDIA GPU systems in the cloud, data center, workstations, or PCs. A license for NVIDIA AI Enterprise or membership in the NVIDIA Developer Program is required. For more information about NVAIE licensing, accessing NGC registry, and pulling container images, please refer to the `FAQ section `_. +As part of `NVIDIA AI Enterprise `_, NVIDIA cuOpt offers a secure, efficient way to rapidly generate world-class route optimization solutions. Using a single optimized container, you can deploy the AI microservice in under 5 minutes on accelerated NVIDIA GPU systems in the cloud, data center, workstations, or PCs. A license for NVIDIA AI Enterprise or membership in the NVIDIA Developer Program is required. For more information about NVAIE licensing, accessing NGC registry, and pulling container images, please refer to the :doc:`FAQ section `. .. note:: NVAIE support is extended to only cuOpt Routing service API. LP and MILP are not supported as part of it, they are just add-ons. @@ -106,18 +106,18 @@ Supported APIs cuOpt supports the following APIs: - C API support - - `Linear Programming (LP) - C `_ - - `Mixed Integer Linear Programming (MILP) - C `_ + - :doc:`Linear Programming (LP) - C ` + - :doc:`Mixed Integer Linear Programming (MILP) - C ` - C++ API support - cuOpt is written in C++ and includes a native C++ API. 
However, we do not provide documentation for the C++ API at this time. We anticipate that the C++ API will change significantly in the future. Use it at your own risk. - Python support - - `Routing (TSP, VRP, and PDP) - Python `_ + - :doc:`Routing (TSP, VRP, and PDP) - Python ` - Linear Programming (LP) and Mixed Integer Linear Programming (MILP) - cuOpt includes a Python API that is used as the backend of the cuOpt server. However, we do not provide documentation for the Python API at this time. We suggest using cuOpt server to access cuOpt via Python. We anticipate that the Python API will change significantly in the future. Use it at your own risk. - Server support - - `Linear Programming (LP) - Server `_ - - `Mixed Integer Linear Programming (MILP) - Server `_ - - `Routing (TSP, VRP, and PDP) - Server `_ + - :doc:`Linear Programming (LP) - Server ` + - :doc:`Mixed Integer Linear Programming (MILP) - Server ` + - :doc:`Routing (TSP, VRP, and PDP) - Server ` - Third-party modeling languages - `AMPL `_ - `GAMS `_ diff --git a/docs/cuopt/source/lp-features.rst b/docs/cuopt/source/lp-features.rst index 29a7e6f5c..f3861ffac 100644 --- a/docs/cuopt/source/lp-features.rst +++ b/docs/cuopt/source/lp-features.rst @@ -53,7 +53,7 @@ Warm Start A warm starts allow a user to provide an initial solution to help PDLP converge faster. The initial ``primal`` and ``dual`` solutions can be specified by the user. -Alternatively, previously run solutions can be used to warm start a new solve to decrease solve time. `Examples `_ are shared on the self-hosted page. +Alternatively, previously run solutions can be used to warm start a new solve to decrease solve time. :ref:`Examples ` are shared on the self-hosted page. PDLP Solver Mode ---------------- @@ -73,12 +73,12 @@ Method Crossover --------- -Crossover allows you to obtain a high-quality basic solution from the results of a PDLP solve. More details can be found `here `__. 
+Crossover allows you to obtain a high-quality basic solution from the results of a PDLP solve. More details can be found :ref:`here `. Logging Callback ---------------- -With logging callback, users can fetch server-side logs for additional debugs and to get details on solver process details. `Examples `__ are shared on the self-hosted page. +With a logging callback, users can fetch server-side logs for additional debugging and get details on the solver process. :ref:`Examples ` are shared on the self-hosted page. Infeasibility Detection @@ -102,4 +102,4 @@ The user may specify a time limit to the solver. By default the solver runs unti Batch Mode ---------- -Users can submit a set of problems which will be solved in a batch. Problems will be solved at the same time in parallel to fully utilize the GPU. Checkout `self-hosted client `_ example in thin client. +Users can submit a set of problems which will be solved in a batch. Problems will be solved at the same time in parallel to fully utilize the GPU. Check out the :ref:`self-hosted client ` example in the thin client. diff --git a/docs/cuopt/source/lp-milp-settings.rst b/docs/cuopt/source/lp-milp-settings.rst index 6a5309a57..28e5105d0 100644 --- a/docs/cuopt/source/lp-milp-settings.rst +++ b/docs/cuopt/source/lp-milp-settings.rst @@ -3,8 +3,8 @@ LP and MILP Settings ================================= -This page describes the parameter settings available for cuOpt's LP and MILP solvers. These parameters are set as `string constants `_ in case of C API and in case of Server Thin client as raw strings. -Please refer to examples in `C `_ and `Server Thin client `_ for more details. +This page describes the parameter settings available for cuOpt's LP and MILP solvers. These parameters are set as :ref:`parameter constants ` in the case of the C API, and as raw strings in the case of the Server Thin client. +Please refer to examples in :doc:`C ` and :doc:`Server Thin client ` for more details. ..
note:: When setting parameters in thin client solver settings, remove ``CUOPT_`` from the parameter name and convert to lowercase. For example, ``CUOPT_TIME_LIMIT`` would be set as ``time_limit``. @@ -78,9 +78,9 @@ Method Note: The default method is ``Concurrent``. -C API users should use the constants defined in `C API Section for Methods `_ for this parameter. +C API users should use the constants defined in :ref:`method-constants` for this parameter. -Server Thin client users should use the `Method enum `_ for this parameter. +Server Thin client users should use the :class:`cuopt_sh_client.SolverMethod` for this parameter. @@ -97,9 +97,9 @@ overall mode from our experiments. For now, only three modes are available: ``St For now, we do not offer a mechanism to know upfront which solver mode will be the best for a specific problem. -C API users should use the constants defined in `C API Section for PDLP Solver Modes `_ for this parameter. +C API users should use the constants defined in :ref:`pdlp-solver-mode-constants` for this parameter. -Server Thin client users should use the `PDLPSolverMode enum `_ for this parameter. +Server Thin client users should use the :class:`cuopt_sh_client.PDLPSolverMode` for this parameter. Iteration Limit ^^^^^^^^^^^^^^^ @@ -133,6 +133,8 @@ detected as infeasible for PDLP to stop. Note: the default value is false. +.. _crossover: + Crossover ^^^^^^^^^ diff --git a/docs/cuopt/source/milp-features.rst b/docs/cuopt/source/milp-features.rst index 17693a5b5..814207a1c 100644 --- a/docs/cuopt/source/milp-features.rst +++ b/docs/cuopt/source/milp-features.rst @@ -53,12 +53,12 @@ Both forms are mathematically equivalent. The choice between them is a matter of Incumbent Solution Callback --------------------------- -User can provide a callback to receive new integer feasible solutions that improve the objective (called incumbents) while the solver is running. An `Incumbent Example `_ is shared on the self-hosted page. 
+Users can provide a callback to receive new integer feasible solutions that improve the objective (called incumbents) while the solver is running. An :ref:`Incumbent Example ` is shared on the self-hosted page. Logging Callback ---------------- -A logging callback allows users to get additional information about how the solve is progressing. A `Logging Callback Example `_ is shared on the self-hosted page. +A logging callback allows users to get additional information about how the solve is progressing. A :ref:`Logging Callback Example ` is shared on the self-hosted page. Time Limit -------------- diff --git a/docs/cuopt/source/routing-features.rst b/docs/cuopt/source/routing-features.rst index 73b0a4318..9e044ff58 100644 --- a/docs/cuopt/source/routing-features.rst +++ b/docs/cuopt/source/routing-features.rst @@ -118,4 +118,4 @@ In cases where a set of orders need to be assigned to a set of vehicles, either Initial Solution ---------------- -Previously run solutions or uploaded solutions can be used as an initial solution to start a new request to boost the speed to the solution. `Examples `_ are shared on the self-hosted page. +Previously run solutions or uploaded solutions can be used as an initial solution to start a new request to boost the speed to the solution. :ref:`Examples ` are shared on the self-hosted page.