diff --git a/.env.template b/.env.template
index 08825c0..02ca561 100644
--- a/.env.template
+++ b/.env.template
@@ -1 +1,2 @@
 OPENAI_API_KEY=place-your-key-here
+AWS_PROFILE=place-your-profile-here
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 68bc17f..fbc5980 100644
--- a/.gitignore
+++ b/.gitignore
@@ -158,3 +158,6 @@ cython_debug/
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 #.idea/
+
+
+*.ipynb
\ No newline at end of file
diff --git a/plotai/llm/bedrock.py b/plotai/llm/bedrock.py
new file mode 100644
index 0000000..b2a583a
--- /dev/null
+++ b/plotai/llm/bedrock.py
@@ -0,0 +1,48 @@
+import os
+import boto3
+from botocore.exceptions import ClientError
+
+from dotenv import load_dotenv
+
+load_dotenv()
+
+
+
+class Bedrock:
+
+
+    def __init__(self, model: str, region_name: str = "us-east-1"):
+        profile_name = os.environ.get("AWS_PROFILE")
+        if profile_name is None:
+            raise Exception(
+                "Please set AWS_PROFILE environment variable."
+                "You can obtain API key from https://console.aws.amazon.com/iam/home#/security_credentials"
+            )
+        self.region_name = region_name
+        self.session = boto3.Session(profile_name=profile_name)
+        self.model = model
+        self.client = self.session.client("bedrock-runtime", region_name=self.region_name)
+
+
+    def chat(self, prompt):
+        conversation = [
+            {
+                "role": "user",
+                "content": [{"text": prompt}],
+            }
+        ]
+        try:
+            response = self.client.converse(
+                modelId=self.model,
+                messages=conversation,
+                inferenceConfig={"maxTokens": 512, "temperature": 0.5, "topP": 0.9},
+            )
+            response_text = response["output"]["message"]["content"][0]["text"]
+            return response_text
+        except (ClientError, Exception) as e:
+            return f"ERROR: Can't invoke '{self.model}'. Reason: {e}"
+
+
+# br = Bedrock("anthropic.claude-3-5-sonnet-20240620-v1:0")
+
+# print(br.chat("Tell me a joke"))
diff --git a/plotai/llm/openai.py b/plotai/llm/openai.py
index e30b0c7..8a3d5dd 100644
--- a/plotai/llm/openai.py
+++ b/plotai/llm/openai.py
@@ -16,13 +16,13 @@ class ChatGPT:
     model = "gpt-3.5-turbo"
 
     def __init__(self, model: str):
-        api_key = os.environ.get("OPENAI_API_KEY")
-        if api_key is None:
+        self.api_key = os.environ.get("OPENAI_API_KEY")
+        if self.api_key is None:
             raise Exception(
                 "Please set OPENAI_API_KEY environment variable."
                 "You can obtain API key from https://platform.openai.com/account/api-keys"
             )
-        openai.api_key = api_key
+        openai.api_key = self.api_key
         self.model = model
 
     @property
@@ -37,7 +37,7 @@ def _default_params(self):
         }
 
     def chat(self, prompt):
-        client = openai.OpenAI()
+        client = openai.OpenAI(api_key=self.api_key)
 
         params = {
             **self._default_params,
diff --git a/plotai/plotai.py b/plotai/plotai.py
index 09c09aa..6855e1c 100644
--- a/plotai/plotai.py
+++ b/plotai/plotai.py
@@ -8,8 +8,8 @@
 
 class PlotAI:
 
-    def __init__(self, *args, **kwargs):
-        self.model_version = "gpt-3.5-turbo"
+    def __init__(self, llm, *args, **kwargs):
+        self.llm = llm
 
         # DataFrame to plot
         self.df, self.x, self.y, self.z = None, None, None, None
@@ -28,7 +28,7 @@ def make(self, prompt):
 
         Logger().log({"title": "Prompt", "details": p.value})
 
-        response = ChatGPT(model=self.model_version).chat(p.value)
+        response = self.llm.chat(p.value)
 
         Logger().log({"title": "Response", "details": response})
 
@@ -36,16 +36,3 @@ def make(self, prompt):
         error = executor.run(response, globals(), {"df":self.df, "x": self.x, "y": self.y, "z": self.z})
         if error is not None:
             Logger().log({"title": "Error in code execution", "details": error})
-
-        # p_again = Prompt(prompt, self.df, self.x, self.y, self.z, previous_code=response, previous_error=error)
-
-        # Logger().log({"title": "Prompt with fix", "details": p_again.value})
-
-        # response = ChatGPT().chat(p.value)
-
-        # Logger().log({"title": "Response", "details": response})
-
-        # executor = Executor()
-        # error = executor.run(response, globals(), locals())
-        # if error is not None:
-        #     Logger().log({"title": "Error in code execution", "details": error})
diff --git a/plotai/prompt/prompt.py b/plotai/prompt/prompt.py
index 8d83392..f3eb45b 100644
--- a/plotai/prompt/prompt.py
+++ b/plotai/prompt/prompt.py
@@ -13,7 +13,8 @@ def __init__(self, prompt="", df=None, x=None, y=None, z=None, previous_code="",
     def input_data_str(self):
         if self.df is not None:
             return f"""
-```python
+```
+python
 # pandas DataFrame
 '''
 {self.df.head(5)}
 '''
@@ -43,12 +44,14 @@ def value(self):
 
 Initial python code to be updated
 
-```python
+```
+python
 # TODO import required dependencies
 # TODO Provide the plot
 ```
 
-Output only Python code.
+Output only Python code. Don't even think about providing the plot. Just the code.
+There is also no need to tell me you are sending Python code, I know what I'm expecting
 
 """
 if self.previous_code != "":
diff --git a/requirements.txt b/requirements.txt
index a1870fc..4fb1b13 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,3 +3,4 @@ pandas
 numpy
 openai
 python-dotenv
+boto3
\ No newline at end of file