Skip to content

Commit 5240007

Browse files
committed
LlamaIndex: Add MCP server example
1 parent 240c718 commit 5240007

File tree

3 files changed

+102
-0
lines changed

3 files changed

+102
-0
lines changed
Lines changed: 82 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,82 @@
1+
"""
2+
Use an LLM to query a database in human language via MCP.
3+
Example code using LlamaIndex with vanilla OpenAI and Azure OpenAI.
4+
5+
https://github.com/run-llama/llama_index/tree/main/llama-index-integrations/tools/llama-index-tools-mcp
6+
7+
## Start CrateDB MCP Server
8+
```
9+
export CRATEDB_CLUSTER_URL="http://localhost:4200/"
10+
cratedb-mcp serve --transport=streamable-http
11+
```
12+
13+
## Usage
14+
```
15+
source env.standalone
16+
export OPENAI_API_KEY=sk-XJZ7pfog5Gp8Kus8D--invalid--0CJ5lyAKSefZLaV1Y9S1
17+
python demo_mcp.py
18+
```
19+
"""
20+
import asyncio
21+
from cratedb_about.instruction import Instructions
22+
23+
from dotenv import load_dotenv
24+
from llama_index.core.agent.workflow import FunctionAgent
25+
from llama_index.llms.openai import OpenAI
26+
from llama_index.tools.mcp import BasicMCPClient, McpToolSpec
27+
28+
from boot import configure_llm
29+
30+
31+
class Agent:
    """Text-to-SQL agent that answers questions against CrateDB.

    Tooling is discovered at runtime from a CrateDB MCP server reachable
    via the `streamable-http` transport; the language model is OpenAI's
    `gpt-4o`, primed with the CrateDB instruction set.
    """

    async def get_tools(self):
        """Discover and return the tool list exposed by the MCP server."""
        # The CrateDB MCP server is expected to listen locally on port 8000,
        # speaking the `streamable-http` transport.
        client = BasicMCPClient("http://127.0.0.1:8000/mcp/")
        spec = McpToolSpec(
            client=client,
            # Optional: Filter the tools by name
            # allowed_tools=["tool1", "tool2"],
            # Optional: Include resources in the tool list
            # include_resources=True,
        )
        tools = await spec.to_tool_list_async()
        return tools

    async def get_agent(self):
        """Assemble a `FunctionAgent` wired to the MCP tools."""
        tools = await self.get_tools()
        return FunctionAgent(
            name="Agent",
            description="CrateDB text-to-SQL agent",
            llm=OpenAI(model="gpt-4o"),
            tools=tools,
            # Prime the LLM with CrateDB-specific guidance.
            system_prompt=Instructions.full(),
        )

    async def aquery(self, query):
        """Asynchronously run `query` through a freshly built agent."""
        agent = await self.get_agent()
        return await agent.run(query)

    def query(self, query):
        """Synchronous convenience wrapper around `aquery`."""
        return asyncio.run(self.aquery(query))
59+
60+
61+
def main():
    """
    Use an LLM to query a database in human language.
    """

    # Configure application.
    load_dotenv()
    configure_llm()

    # Use an agent that uses the CrateDB MCP server.
    agent = Agent()

    # Invoke an inquiry and report both question and answer.
    print("Running query")
    question = "What is the average value for sensor 1?"
    answer = agent.query(question)
    print("Query was:", question)
    print("Answer was:", answer)
79+
80+
81+
# Run the demo only when executed as a script, not when imported
# (the test suite imports `main` and calls it in-process).
if __name__ == "__main__":
    main()
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,9 @@
1+
cratedb-about @ git+https://github.com/crate/about.git@instructions
12
langchain-openai<0.4
23
llama-index-embeddings-langchain<0.4
34
llama-index-embeddings-openai<0.4
45
llama-index-llms-azure-openai<0.4
56
llama-index-llms-openai<0.5
7+
llama-index-tools-mcp<0.3
68
python-dotenv
79
sqlalchemy-cratedb

topic/machine-learning/llama-index/test.py

Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -38,3 +38,21 @@ def test_nlsql(cratedb, capsys):
3838
# Verify the outcome.
3939
out = capsys.readouterr().out
4040
assert "Answer was: The average value for sensor 1 is approximately 17.03." in out
41+
42+
43+
def test_mcp(cratedb, capsys):
    """
    Execute `demo_mcp.py` and verify outcome.
    """

    # Load the standalone configuration also for software testing.
    # On CI, `OPENAI_API_KEY` will need to be supplied externally.
    load_dotenv("env.standalone")

    # Run the demo program in-process so `capsys` can capture its output.
    from demo_mcp import main as run_demo
    run_demo()

    # Verify the outcome against the expected answer string.
    captured = capsys.readouterr().out
    expected = "Answer was: The average value for sensor 1 is approximately 17.03."
    assert expected in captured

0 commit comments

Comments
 (0)