Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,45 @@ async def run_code_executor_agent() -> None:

asyncio.run(run_code_executor_agent())

In this example, we show how to set up a `CodeExecutorAgent` agent that uses the
:py:class:`~docker.types.DeviceRequest` to expose a GPU to the container for CUDA-accelerated code execution.

.. code-block:: python

import asyncio
from autogen_agentchat.agents import CodeExecutorAgent
from autogen_agentchat.messages import TextMessage
from autogen_ext.code_executors.docker import DockerCommandLineCodeExecutor
from autogen_core import CancellationToken
from docker.types import DeviceRequest


async def run_code_executor_agent() -> None:
# Create a code executor agent that uses a Docker container to execute code.
code_executor = DockerCommandLineCodeExecutor(
work_dir="coding", device_requests=[DeviceRequest(count=-1, capabilities=[["gpu"]])]
)
await code_executor.start()
code_executor_agent = CodeExecutorAgent("code_executor", code_executor=code_executor)

# Display the GPU information
task = TextMessage(
content='''Here is some code
```bash
nvidia-smi
```
''',
source="user",
)
response = await code_executor_agent.on_messages([task], CancellationToken())
print(response.chat_message)

# Stop the code executor.
await code_executor.stop()


asyncio.run(run_code_executor_agent())

In the following example, we show how to set up `CodeExecutorAgent` without the `model_client` parameter for executing code blocks generated by other agents in a group chat using :py:class:`~autogen_ext.code_executors.docker.DockerCommandLineCodeExecutor`.

.. code-block:: python
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,8 @@
from pydantic import BaseModel
from typing_extensions import Self

from docker.types import DeviceRequest

from .._common import (
CommandLineCodeResult,
build_python_functions_file,
Expand Down Expand Up @@ -116,6 +118,7 @@ class DockerCommandLineCodeExecutor(CodeExecutor, Component[DockerCommandLineCod
stop_container (bool, optional): If true, will automatically stop the
container when stop is called, when the context manager exits or when
the Python process exits via atexit. Defaults to True.
device_requests (Optional[List[DeviceRequest]], optional): A list of device request instances to add to the container for exposing GPUs (e.g., [docker.types.DeviceRequest(count=-1, capabilities=[['gpu']])]). Defaults to None.
functions (List[Union[FunctionWithRequirements[Any, A], Callable[..., Any]]]): A list of functions that are available to the code executor. Default is an empty list.
functions_module (str, optional): The name of the module that will be created to store the functions. Defaults to "functions".
extra_volumes (Optional[Dict[str, Dict[str, str]]], optional): A dictionary of extra volumes (beyond the work_dir) to mount to the container;
Expand Down Expand Up @@ -163,6 +166,7 @@ def __init__(
bind_dir: Optional[Union[Path, str]] = None,
auto_remove: bool = True,
stop_container: bool = True,
device_requests: Optional[List[DeviceRequest]] = None,
functions: Sequence[
Union[
FunctionWithRequirements[Any, A],
Expand Down Expand Up @@ -229,6 +233,7 @@ def __init__(
self._extra_hosts = extra_hosts if extra_hosts is not None else {}
self._init_command = init_command
self._delete_tmp_files = delete_tmp_files
self._device_requests = device_requests

# Setup could take some time so we intentionally wait for the first code block to do it.
if len(functions) > 0:
Expand Down Expand Up @@ -488,6 +493,7 @@ async def start(self) -> None:
volumes={str(self.bind_dir.resolve()): {"bind": "/workspace", "mode": "rw"}, **self._extra_volumes},
working_dir="/workspace",
extra_hosts=self._extra_hosts,
device_requests=self._device_requests,
)
await asyncio.to_thread(self._container.start)

Expand Down
Loading