Skip to content

Commit

Permalink
Added CLI interface to query the model directly.
Browse files Browse the repository at this point in the history
Updated `README.md` and translations accordingly.
  • Loading branch information
JusticeRage committed Sep 18, 2024
1 parent fbbae19 commit bfe67a4
Show file tree
Hide file tree
Showing 26 changed files with 177 additions and 25 deletions.
19 changes: 14 additions & 5 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# Gepetto

Gepetto is a Python script which uses various large language models to provide meaning to functions
decompiled by IDA Pro. At the moment, it can ask them to explain what a function does, and to automatically
Gepetto is a Python plugin which uses various large language models to provide meaning to functions
decompiled by IDA Pro (≥ 7.4). It can leverage them to explain what a function does, and to automatically
rename its variables. Here is a simple example of what results it can provide in mere seconds:

![](https://github.com/JusticeRage/Gepetto/blob/main/readme/comparison.png?raw=true)
Expand All @@ -28,17 +28,19 @@ method with the corresponding provider.
## Supported models

- [OpenAI](https://playground.openai.com/)
- gpt-3.5-turbo-1106
- gpt-3.5-turbo-0125
- gpt-4-turbo
- gpt-4o (recommended for beginners)
- [Groq](https://console.groq.com/playground)
- llama3-70b-8192
- [Together](https://api.together.ai/)
- mistralai/Mixtral-8x22B-Instruct-v0.1 (does not support renaming variables)
- [Ollama](https://ollama.com/)
- Any local model exposed through Ollama (will not appear if Ollama is not running)

Adding support for additional models shouldn't be too difficult, provided whatever provider you're considering exposes
an API similar to OpenAI's. Look into the `gepetto/models` folder for inspiration, or open an issue if you can't figure
it out. Also make sure you edit `ida/ui.py` to add the relevant menu entries for your addition.
it out.

## Usage

Expand All @@ -51,7 +53,14 @@ Switch between models supported by Gepetto from the Edit > Gepetto menu:

![](https://github.com/JusticeRage/Gepetto/blob/main/readme/select_model.png?raw=true)

You can also use the following hotkeys:
Gepetto also provides a CLI (command-line interface) you can use to ask questions to the LLM directly from IDA. Make sure to select
`Gepetto` in the input bar:

![](https://github.com/JusticeRage/Gepetto/blob/main/readme/cli.png?raw=true)

### Hotkeys

The following hotkeys are available:

- Ask the model to explain the function: `Ctrl` + `Alt` + `H`
- Request better names for the function's variables: `Ctrl` + `Alt` + `R`
Expand Down
48 changes: 48 additions & 0 deletions gepetto/ida/cli.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
import functools

import ida_kernwin
import ida_idaapi
from pyexpat.errors import messages

import gepetto.config
import gepetto.ida.handlers

# Singleton CLI widget instance; created by register_cli() below (None until then).
CLI: ida_kernwin.cli_t = None
# Running conversation sent to the model on every query so it "remembers" prior
# exchanges. Seeded with the system prompt framing the assistant's role.
MESSAGES: list[dict] = [
    {"role": "system", "content": _("You are a helpful assistant embedded in IDA Pro. Your role is to facilitate "
                                    "reverse-engineering and answer programming questions.")}
]  # Keep a history of the conversation to simulate LLM memory.

class GepettoCLI(ida_kernwin.cli_t):
    """CLI widget shown in IDA's input bar, letting the user chat with the selected LLM."""
    flags = 0
    sname = "Gepetto"
    lname = "Gepetto - " + _("LLM chat")
    hint = "Gepetto"

    def OnExecuteLine(self, line):
        """Send the typed line to the model, keeping the whole conversation as context."""
        MESSAGES.append({"role": "user", "content": line})
        handler = functools.partial(gepetto.ida.handlers.conversation_callback, memory=MESSAGES)
        gepetto.config.model.query_model_async(MESSAGES, handler)
        return True

    def OnKeydown(self, line, x, sellen, vkey, shift):
        """No custom keyboard handling; fall through to IDA's default behavior."""
        pass

# -----------------------------------------------------------------------------

def cli_lifecycle_callback(code, old=0):
    """Re-register or tear down the CLI as databases open/close or IDA terminates."""
    if code == ida_idaapi.NW_OPENIDB:
        CLI.register()
    elif code in (ida_idaapi.NW_CLOSEIDB, ida_idaapi.NW_TERMIDA):
        CLI.unregister()

# -----------------------------------------------------------------------------

def register_cli():
    """
    Instantiate and register the Gepetto CLI with IDA, replacing any previously
    registered instance, and subscribe to IDA lifecycle notifications so the CLI
    is re-registered when a database opens and removed when one closes or IDA exits.
    """
    global CLI
    if CLI:
        CLI.unregister()
        # NOTE(review): cli_lifecycle_callback(NW_TERMIDA) calls CLI.unregister() a
        # second time right after the line above — confirm whether the intent was
        # instead to remove the notify_when() subscription for the old instance.
        cli_lifecycle_callback(ida_idaapi.NW_TERMIDA)
    CLI = GepettoCLI()
    if CLI.register():
        # Only subscribe to lifecycle events once registration actually succeeded.
        ida_idaapi.notify_when(ida_idaapi.NW_TERMIDA | ida_idaapi.NW_OPENIDB | ida_idaapi.NW_CLOSEIDB, cli_lifecycle_callback)
19 changes: 19 additions & 0 deletions gepetto/ida/handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,23 @@ def comment_callback(address, view, response):
view.refresh_view(False)
print(_("{model} query finished!").format(model=str(gepetto.config.model)))

# -----------------------------------------------------------------------------

def conversation_callback(response, memory):
    """
    Print the model's reply to IDA's output window and record it in the chat history.
    :param response: The text returned by the model.
    :param memory: The running list of conversation messages; the reply is appended to it.
    :return:
    """
    memory.append({"role": "assistant", "content": response})

    prefix = f"{str(gepetto.config.model)}> "
    print()
    for chunk in response.split("\n"):
        if chunk.strip():
            print(prefix + chunk)
    print()

# -----------------------------------------------------------------------------

Expand All @@ -58,6 +75,7 @@ def activate(self, ctx):
_("Can you explain what the following C function does and suggest a better name for "
"it?\n{decompiler_output}").format(decompiler_output=str(decompiler_output)),
functools.partial(comment_callback, address=idaapi.get_screen_ea(), view=v))
print(_("Request to {model} sent...").format(model=str(gepetto.config.model)))
return 1

# This action is always available.
Expand Down Expand Up @@ -126,6 +144,7 @@ def activate(self, ctx):
"JSON dictionary.").format(decompiler_output=str(decompiler_output)),
functools.partial(rename_callback, address=idaapi.get_screen_ea(), view=v),
additional_model_options={"response_format": {"type": "json_object"}})
print(_("Request to {model} sent...").format(model=str(gepetto.config.model)))
return 1

# This action is always available.
Expand Down
6 changes: 5 additions & 1 deletion gepetto/ida/ui.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,12 @@

import gepetto.config
from gepetto.ida.handlers import ExplainHandler, RenameHandler, SwapModelHandler
from gepetto.ida.cli import register_cli
import gepetto.models.model_manager


# =============================================================================
# Setup the context menu and hotkey in IDA
# Setup the menus, hotkeys and cli in IDA
# =============================================================================

class GepettoPlugin(idaapi.plugin_t):
Expand Down Expand Up @@ -64,6 +65,9 @@ def init(self):
self.menu = ContextMenuHooks()
self.menu.hook()

# Register CLI
register_cli()

return idaapi.PLUGIN_KEEP

# -----------------------------------------------------------------------------
Expand Down
Binary file modified gepetto/locales/ca_ES/LC_MESSAGES/gepetto.mo
Binary file not shown.
8 changes: 7 additions & 1 deletion gepetto/locales/ca_ES/LC_MESSAGES/gepetto.po
Original file line number Diff line number Diff line change
Expand Up @@ -124,4 +124,10 @@ msgid "Couldn't change model to {model}: {error}"
msgstr ""

msgid "Warning: Gepetto's configuration doesn't contain option {option} in section {section}!"
msgstr "Advertència: La configuració de Gepetto no conté l'opció {option} a la secció {section}!"
msgstr "Advertència: La configuració de Gepetto no conté l'opció {option} a la secció {section}!"

msgid "LLM chat"
msgstr "Conversa amb el LLM"

msgid "You are a helpful assistant embedded in IDA Pro. Your role is to facilitate reverse-engineering and answer programming questions."
msgstr "Ets un assistent útil integrat a IDA Pro. El teu paper és facilitar la retroenginyeria i respondre preguntes de programació."
Binary file modified gepetto/locales/es_ES/LC_MESSAGES/gepetto.mo
Binary file not shown.
6 changes: 6 additions & 0 deletions gepetto/locales/es_ES/LC_MESSAGES/gepetto.po
Original file line number Diff line number Diff line change
Expand Up @@ -126,3 +126,9 @@ msgstr ""

msgid "Warning: Gepetto's configuration doesn't contain option {option} in section {section}!"
msgstr "Advertencia: La configuración de Gepetto no contiene la opción {option} en la sección {section}!"

msgid "LLM chat"
msgstr "Conversación de LLM"

msgid "You are a helpful assistant embedded in IDA Pro. Your role is to facilitate reverse-engineering and answer programming questions."
msgstr "Eres un asistente útil integrado en IDA Pro. Tu papel es facilitar la ingeniería inversa y responder preguntas de programación."
Binary file modified gepetto/locales/fr_FR/LC_MESSAGES/gepetto.mo
Binary file not shown.
8 changes: 7 additions & 1 deletion gepetto/locales/fr_FR/LC_MESSAGES/gepetto.po
Original file line number Diff line number Diff line change
Expand Up @@ -124,4 +124,10 @@ msgid "Couldn't change model to {model}: {error}"
msgstr "Impossible de choisir {model} comme modèle : {error}"

msgid "Warning: Gepetto's configuration doesn't contain option {option} in section {section}!"
msgstr "Attention: la configuration de Gepetto ne contient pas l'option {option} dans la section {section} !"
msgstr "Attention: la configuration de Gepetto ne contient pas l'option {option} dans la section {section} !"

msgid "LLM chat"
msgstr "Conversation avec le LLM"

msgid "You are a helpful assistant embedded in IDA Pro. Your role is to facilitate reverse-engineering and answer programming questions."
msgstr "Tu es un assistant serviable intégré dans IDA Pro. Ton rôle est de faciliter la rétro-ingénierie et répondre aux questions liées à la programmation."
8 changes: 7 additions & 1 deletion gepetto/locales/gepetto.pot
Original file line number Diff line number Diff line change
Expand Up @@ -87,4 +87,10 @@ msgid "Couldn't change model to {model}: {error}"
msgstr "Couldn't change model to {model}: {error}"

msgid "Warning: Gepetto's configuration doesn't contain option {option} in section {section}!"
msgstr "Warning: Gepetto's configuration doesn't contain option {option} in section {section}!"
msgstr "Warning: Gepetto's configuration doesn't contain option {option} in section {section}!"

msgid "LLM chat"
msgstr "LLM chat"

msgid "You are a helpful assistant embedded in IDA Pro. Your role is to facilitate reverse-engineering and answer programming questions."
msgstr "You are a helpful assistant embedded in IDA Pro. Your role is to facilitate reverse-engineering and answer programming questions."
Binary file modified gepetto/locales/it_IT/LC_MESSAGES/gepetto.mo
Binary file not shown.
8 changes: 7 additions & 1 deletion gepetto/locales/it_IT/LC_MESSAGES/gepetto.po
Original file line number Diff line number Diff line change
Expand Up @@ -122,4 +122,10 @@ msgid "Couldn't change model to {model}: {error}"
msgstr ""

msgid "Warning: Gepetto's configuration doesn't contain option {option} in section {section}!"
msgstr "Avviso: La configurazione di Gepetto non contiene l'opzione {option} nella sezione {section}!"
msgstr "Avviso: La configurazione di Gepetto non contiene l'opzione {option} nella sezione {section}!"

msgid "LLM chat"
msgstr "Conversazione LLM"

msgid "You are a helpful assistant embedded in IDA Pro. Your role is to facilitate reverse-engineering and answer programming questions."
msgstr "Sei un assistente utile integrato in IDA Pro. Il tuo ruolo è facilitare l'ingegneria inversa e rispondere alle domande di programmazione."
Binary file modified gepetto/locales/ko_KR/LC_MESSAGES/gepetto.mo
Binary file not shown.
6 changes: 6 additions & 0 deletions gepetto/locales/ko_KR/LC_MESSAGES/gepetto.po
Original file line number Diff line number Diff line change
Expand Up @@ -114,3 +114,9 @@ msgstr "{model}로 모델을 변경할 수 없습니다: {error}"

msgid "Warning: Gepetto's configuration doesn't contain option {option} in section {section}!"
msgstr "경고: Gepetto의 설정에 섹션 {section}에서 옵션 {option}이(가) 포함되어 있지 않습니다!"

msgid "LLM chat"
msgstr "LLM 대화"

msgid "You are a helpful assistant embedded in IDA Pro. Your role is to facilitate reverse-engineering and answer programming questions."
msgstr "당신은 IDA Pro에 내장된 유용한 도우미입니다. 당신의 역할은 리버스 엔지니어링을 돕고 프로그래밍 질문에 답하는 것입니다."
Binary file modified gepetto/locales/ru/LC_MESSAGES/gepetto.mo
Binary file not shown.
8 changes: 7 additions & 1 deletion gepetto/locales/ru/LC_MESSAGES/gepetto.po
Original file line number Diff line number Diff line change
Expand Up @@ -122,4 +122,10 @@ msgid "Couldn't change model to {model}: {error}"
msgstr ""

msgid "Warning: Gepetto's configuration doesn't contain option {option} in section {section}!"
msgstr "Предупреждение: В конфигурации Gepetto отсутствует опция {option} в разделе {section}!"
msgstr "Предупреждение: В конфигурации Gepetto отсутствует опция {option} в разделе {section}!"

msgid "LLM chat"
msgstr "Чат LLM"

msgid "You are a helpful assistant embedded in IDA Pro. Your role is to facilitate reverse-engineering and answer programming questions."
msgstr "Вы — полезный помощник, встроенный в IDA Pro. Ваша роль — помогать в реверс-инжиниринге и отвечать на вопросы по программированию."
Binary file modified gepetto/locales/tr/LC_MESSAGES/gepetto.mo
Binary file not shown.
8 changes: 7 additions & 1 deletion gepetto/locales/tr/LC_MESSAGES/gepetto.po
Original file line number Diff line number Diff line change
Expand Up @@ -122,4 +122,10 @@ msgid "Couldn't change model to {model}: {error}"
msgstr ""

msgid "Warning: Gepetto's configuration doesn't contain option {option} in section {section}!"
msgstr "Uyarı: Gepetto'nun yapılandırmasında {section} bölümünde {option} seçeneği yok!"
msgstr "Uyarı: Gepetto'nun yapılandırmasında {section} bölümünde {option} seçeneği yok!"

msgid "LLM chat"
msgstr "LLM sohbet"

msgid "You are a helpful assistant embedded in IDA Pro. Your role is to facilitate reverse-engineering and answer programming questions."
msgstr "IDA Pro'ya gömülü yararlı bir asistansınız. Rolünüz, tersine mühendisliği kolaylaştırmak ve programlama sorularını yanıtlamaktır."
Binary file modified gepetto/locales/zh_CN/LC_MESSAGES/gepetto.mo
Binary file not shown.
8 changes: 7 additions & 1 deletion gepetto/locales/zh_CN/LC_MESSAGES/gepetto.po
Original file line number Diff line number Diff line change
Expand Up @@ -113,4 +113,10 @@ msgid "Couldn't change model to {model}: {error}"
msgstr ""

msgid "Warning: Gepetto's configuration doesn't contain option {option} in section {section}!"
msgstr "警告: Gepetto 的配置在 {section} 部分中不包含选项 {option}!"
msgstr "警告: Gepetto 的配置在 {section} 部分中不包含选项 {option}!"

msgid "LLM chat"
msgstr "LLM 对话"

msgid "You are a helpful assistant embedded in IDA Pro. Your role is to facilitate reverse-engineering and answer programming questions."
msgstr "你是嵌入在 IDA Pro 中的有用助手。你的角色是促进逆向工程并回答编程问题。"
Binary file modified gepetto/locales/zh_TW/LC_MESSAGES/gepetto.mo
Binary file not shown.
8 changes: 7 additions & 1 deletion gepetto/locales/zh_TW/LC_MESSAGES/gepetto.po
Original file line number Diff line number Diff line change
Expand Up @@ -113,4 +113,10 @@ msgid "Couldn't change model to {model}: {error}"
msgstr ""

msgid "Warning: Gepetto's configuration doesn't contain option {option} in section {section}!"
msgstr "警告: Gepetto 的配置在 {section} 部分中不包含選項 {option}!"
msgstr "警告: Gepetto 的配置在 {section} 部分中不包含選項 {option}!"

msgid "LLM chat"
msgstr "LLM 對話"

msgid "You are a helpful assistant embedded in IDA Pro. Your role is to facilitate reverse-engineering and answer programming questions."
msgstr "你是嵌入在 IDA Pro 中的有用助手。你的角色是促進逆向工程並回答程式設計問題。"
18 changes: 12 additions & 6 deletions gepetto/models/local_ollama.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,6 @@ def __init__(self, model):
def query_model_async(self, query, cb, additional_model_options = None):
if additional_model_options is None:
additional_model_options = {}
print(_("Request to {model} sent...").format(model=self.model))
t = threading.Thread(target=self.query_model, args=[query, cb, additional_model_options])
t.start()

Expand All @@ -51,11 +50,18 @@ def query_model(self, query, cb, additional_model_options=None):
kwargs["format"] = "json"

try:
stream = self.client.generate(model=self.model,
prompt=query,
stream=False,
**kwargs)
ida_kernwin.execute_sync(functools.partial(cb, response=stream["response"]),
if type(query) is str:
conversation = [
{"role": "user", "content": query}
]
else:
conversation = query

stream = self.client.chat(model=self.model,
messages=conversation,
stream=False,
**kwargs)
ida_kernwin.execute_sync(functools.partial(cb, response=stream["message"]["content"]),
ida_kernwin.MFF_WRITE)
except Exception as e:
print(e)
Expand Down
16 changes: 11 additions & 5 deletions gepetto/models/openai.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import httpx as _httpx
import ida_kernwin
import openai
from pyexpat.errors import messages

from gepetto.models.base import LanguageModel
import gepetto.models.model_manager
Expand Down Expand Up @@ -51,19 +52,25 @@ def query_model(self, query, cb, additional_model_options=None):
"""
Function which sends a query to a GPT-API-compatible model and calls a callback when the response is available.
Blocks until the response is received
:param query: The request to send to the model
:param query: The request to send to the model. It can be a single string, or a sequence of messages in a
dictionary for a whole conversation.
:param cb: The function to which the response will be passed to.
:param additional_model_options: Additional parameters used when creating the model object. Typically, for
OpenAI, response_format={"type": "json_object"}.
"""
if additional_model_options is None:
additional_model_options = {}
try:
if type(query) is str:
conversation = [
{"role": "user", "content": query}
]
else:
conversation = query

response = self.client.chat.completions.create(
model=self.model,
messages=[
{"role": "user", "content": query}
],
messages=conversation,
**additional_model_options
)
ida_kernwin.execute_sync(functools.partial(cb, response=response.choices[0].message.content),
Expand Down Expand Up @@ -92,7 +99,6 @@ def query_model_async(self, query, cb, additional_model_options=None):
"""
if additional_model_options is None:
additional_model_options = {}
print(_("Request to {model} sent...").format(model=str(gepetto.config.model)))
t = threading.Thread(target=self.query_model, args=[query, cb, additional_model_options])
t.start()

Expand Down
Binary file added readme/cli.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.

0 comments on commit bfe67a4

Please sign in to comment.