Skip to content

Commit

Permalink
Merge pull request #23 from MirageML/aman/minor-fixes
Browse files Browse the repository at this point in the history
Fixed chat and a couple of other issues
  • Loading branch information
stripuramallu3 authored Oct 23, 2023
2 parents ca33b63 + d7eebb0 commit 5239de7
Show file tree
Hide file tree
Showing 6 changed files with 82 additions and 82 deletions.
6 changes: 3 additions & 3 deletions mirageml/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -184,12 +184,12 @@ def add_source_command(link: str = typer.Argument(default="", help="Link to the


# Delete Commands
@delete_app.command(name="source", no_args_is_help=True)
def delete_source_command(names: List[str] = typer.Argument(help="Names of the sources to delete")):
    """Delete one or more sources"""
    # Imported lazily so the CLI starts fast even when this command isn't used.
    from .commands import delete_source

    delete_source(names)


# Sync Commands
Expand Down
100 changes: 50 additions & 50 deletions mirageml/commands/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,31 +79,48 @@ def chat(files: list[str] = [], urls: list[str] = [], sources: list[str] = []):
elif user_input.lower().strip() == "reset":
break

chat_history.append({"role": "user", "content": user_input})

with Live(
Panel(
"Assistant is thinking...",
title="[bold blue]Assistant[/bold blue]",
box=HORIZONTALS,
border_style="blue",
),
console=console,
transient=True,
auto_refresh=True,
refresh_per_second=8,
) as live:
response = llm_call(
chat_history,
model=config["model"],
stream=True,
local=config["local_mode"],
)
except KeyboardInterrupt:
typer.secho("Ending chat. Goodbye!", fg=typer.colors.BRIGHT_GREEN, bold=True)
return

ai_response = ""
if config["local_mode"]:
for chunk in response:
ai_response += chunk
chat_history.append({"role": "user", "content": user_input})

with Live(
Panel(
"Assistant is thinking...",
title="[bold blue]Assistant[/bold blue]",
box=HORIZONTALS,
border_style="blue",
),
console=console,
transient=True,
auto_refresh=True,
refresh_per_second=8,
) as live:
response = llm_call(
chat_history,
model=config["model"],
stream=True,
local=config["local_mode"],
)

ai_response = ""
if config["local_mode"]:
for chunk in response:
ai_response += chunk
live.update(
Panel(
Markdown(ai_response),
title="[bold blue]Assistant[/bold blue]",
box=HORIZONTALS,
border_style="blue",
)
)
else:
for chunk in response.iter_content(chunk_size=512):
if chunk:
decoded_chunk = chunk.decode("utf-8")
ai_response += decoded_chunk
live.update(
Panel(
Markdown(ai_response),
Expand All @@ -112,31 +129,14 @@ def chat(files: list[str] = [], urls: list[str] = [], sources: list[str] = []):
border_style="blue",
)
)
else:
for chunk in response.iter_content(chunk_size=512):
if chunk:
decoded_chunk = chunk.decode("utf-8")
ai_response += decoded_chunk
live.update(
Panel(
Markdown(ai_response),
title="[bold blue]Assistant[/bold blue]",
box=HORIZONTALS,
border_style="blue",
)
)

chat_history.append({"role": "assistant", "content": ai_response})
indexed_ai_response = add_indices_to_code_blocks(ai_response)
console.print(
Panel(
Markdown(indexed_ai_response),
title="[bold blue]Assistant[/bold blue]",
box=HORIZONTALS,
border_style="blue",
)
chat_history.append({"role": "assistant", "content": ai_response})
indexed_ai_response = add_indices_to_code_blocks(ai_response)
console.print(
Panel(
Markdown(indexed_ai_response),
title="[bold blue]Assistant[/bold blue]",
box=HORIZONTALS,
border_style="blue",
)

except KeyboardInterrupt:
typer.secho("Ending chat. Goodbye!", fg=typer.colors.BRIGHT_GREEN, bold=True)
return
)
35 changes: 18 additions & 17 deletions mirageml/commands/delete_source.py
Original file line number Diff line number Diff line change
@@ -1,28 +1,29 @@
from .utils.vectordb import delete_qdrant_db, delete_remote_qdrant_db


def delete_source(names: list[str]):
    """Delete one or more sources from the local and remote vector stores.

    For each requested name that does not match a known source, the user is
    prompted interactively to choose a valid one; entering "exit" (or an
    empty value) aborts the remaining deletions.

    Args:
        names: Names of the sources to delete.
    """
    import typer

    from .config import load_config

    config = load_config()
    # Known sources span both local and remote configuration.
    sources = config["local"] + config["remote"]

    for name in names:
        # Re-prompt until the user supplies a valid source name or exits.
        while name not in sources:
            typer.secho(
                f"Source: {name} does not exist. Choose one of the following or exit:",
                fg=typer.colors.BRIGHT_RED,
            )
            print("\n".join(sources))
            name = typer.prompt("Enter source name", default="exit", show_default=False)
            if not name or name == "exit":
                return

        typer.secho(f"Deleting Source: {name}...", fg=typer.colors.BRIGHT_RED)
        delete_qdrant_db(name)
        delete_remote_qdrant_db(name)
        # Fix: forget the deleted name so later iterations/prompts cannot
        # re-select an already-deleted source and attempt a double deletion.
        sources.remove(name)
        print(f"Deleted Source: {name}")
6 changes: 3 additions & 3 deletions mirageml/commands/rag.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ def search(live, user_input, sources): # ,local_sources, remote_sources=None):

def rank_hits(hits):
# Rank the hits based on their relevance
sorted_hits = sorted(hits, key=lambda x: x["score"], reverse=True)[:5]
sorted_hits = sorted(hits, key=lambda x: x["score"], reverse=True)[:10]
return sorted_hits


Expand Down Expand Up @@ -104,14 +104,14 @@ def rag_chat(sources):
vertical_overflow="visible",
) as live:
sorted_hits = search_and_rank(live, user_input, sources)
sources_used = [hit["payload"]["source"] for hit in sorted_hits]
sources_used = list(set([hit["payload"]["source"] for hit in sorted_hits]))
context = create_context(sorted_hits)

# Chat history that will be sent to the AI model
chat_history = [
{
"role": "system",
"content": "You are a helpful assistant that responds to questions concisely with the given context in the following format:\n{answer}\n\nSources:\n{sources}",
"content": "You are a helpful assistant. When responding to questions, provide answers concisely using the following format:\n{answer}\n\nSources:\n{sources}",
},
{
"role": "user",
Expand Down
15 changes: 7 additions & 8 deletions mirageml/commands/utils/vectordb.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,15 +102,14 @@ def create_remote_qdrant_db(collection_name, link=None, path=None):
if response.status_code == 200:
for chunk in response.iter_lines():
# process line here
link = chunk.decode("utf-8").strip()
if link:
live.update(
Panel(
f"Indexing: {link}",
title="[bold green]Indexer[/bold green]",
border_style="green",
)
link = chunk.decode("utf-8")
live.update(
Panel(
f"Indexing: {link}",
title="[bold green]Indexer[/bold green]",
border_style="green",
)
)

typer.secho(f"Created Source: {collection_name}", fg=typer.colors.GREEN, bold=True)
set_sources()
Expand Down
2 changes: 1 addition & 1 deletion mirageml_version/_version_generated.py
Original file line number Diff line number Diff line change
@@ -1,2 +1,2 @@
# Copyright Mirage ML 2023
# NOTE(review): the duplicate assignment below is diff residue (old value vs.
# new value from the commit); only the final assignment (14) takes effect.
build_number = 13
build_number = 14

0 comments on commit 5239de7

Please sign in to comment.