From 89909115226dfa2c26799dbbd684428dc12198f6 Mon Sep 17 00:00:00 2001 From: jayceslesar Date: Sun, 16 Apr 2023 14:02:48 -0400 Subject: [PATCH 1/2] unify annotations to future syntax --- autogpt/agent/agent_manager.py | 9 +++++---- autogpt/commands/evaluate_code.py | 4 ++-- autogpt/commands/file_operations.py | 8 +++++--- autogpt/commands/google_search.py | 5 +++-- autogpt/commands/improve_code.py | 5 +++-- autogpt/commands/web_playwright.py | 5 +++-- autogpt/commands/web_requests.py | 11 ++++++----- autogpt/commands/web_selenium.py | 9 +++++---- autogpt/commands/write_tests.py | 7 ++++--- autogpt/config/ai_config.py | 6 ++++-- autogpt/json_fixes/bracket_termination.py | 5 +++-- autogpt/json_fixes/parsing.py | 11 ++++++----- autogpt/llm_utils.py | 13 +++++++------ autogpt/memory/local.py | 10 ++++++---- autogpt/memory/no_memory.py | 8 +++++--- autogpt/memory/redismem.py | 8 +++++--- autogpt/processing/html.py | 7 ++++--- autogpt/promptgenerator.py | 8 +++++--- autogpt/token_counter.py | 4 ++-- 19 files changed, 83 insertions(+), 60 deletions(-) diff --git a/autogpt/agent/agent_manager.py b/autogpt/agent/agent_manager.py index 3467f8bf331e..e4bfb12611d4 100644 --- a/autogpt/agent/agent_manager.py +++ b/autogpt/agent/agent_manager.py @@ -1,5 +1,6 @@ """Agent manager for managing GPT agents""" -from typing import List, Tuple, Union +from __future__ import annotations + from autogpt.llm_utils import create_chat_completion from autogpt.config.config import Singleton @@ -14,7 +15,7 @@ def __init__(self): # Create new GPT agent # TODO: Centralise use of create_chat_completion() to globally enforce token limit - def create_agent(self, task: str, prompt: str, model: str) -> Tuple[int, str]: + def create_agent(self, task: str, prompt: str, model: str) -> tuple[int, str]: """Create a new agent and return its key Args: @@ -47,7 +48,7 @@ def create_agent(self, task: str, prompt: str, model: str) -> Tuple[int, str]: return key, agent_reply - def message_agent(self, key: 
Union[str, int], message: str) -> str: + def message_agent(self, key: str | int, message: str) -> str: """Send a message to an agent and return its response Args: @@ -73,7 +74,7 @@ def message_agent(self, key: Union[str, int], message: str) -> str: return agent_reply - def list_agents(self) -> List[Tuple[Union[str, int], str]]: + def list_agents(self) -> list[tuple[str | int, str]]: """Return a list of all agents Returns: diff --git a/autogpt/commands/evaluate_code.py b/autogpt/commands/evaluate_code.py index a36952e5e0e5..8f7cbca9c1bf 100644 --- a/autogpt/commands/evaluate_code.py +++ b/autogpt/commands/evaluate_code.py @@ -1,10 +1,10 @@ """Code evaluation module.""" -from typing import List +from __future__ import annotations from autogpt.llm_utils import call_ai_function -def evaluate_code(code: str) -> List[str]: +def evaluate_code(code: str) -> list[str]: """ A function that takes in a string and returns a response from create chat completion api call. diff --git a/autogpt/commands/file_operations.py b/autogpt/commands/file_operations.py index 31500e8e6977..2911d601758b 100644 --- a/autogpt/commands/file_operations.py +++ b/autogpt/commands/file_operations.py @@ -1,8 +1,10 @@ """File operations for AutoGPT""" +from __future__ import annotations + import os import os.path from pathlib import Path -from typing import Generator, List +from typing import Generator # Set a dedicated folder for file I/O WORKING_DIRECTORY = Path(os.getcwd()) / "auto_gpt_workspace" @@ -214,14 +216,14 @@ def delete_file(filename: str) -> str: return f"Error: {str(e)}" -def search_files(directory: str) -> List[str]: +def search_files(directory: str) -> list[str]: """Search for files in a directory Args: directory (str): The directory to search in Returns: - List[str]: A list of files found in the directory + list[str]: A list of files found in the directory """ found_files = [] diff --git a/autogpt/commands/google_search.py b/autogpt/commands/google_search.py index 
6deb9b5033cc..148ba1d0e1cf 100644 --- a/autogpt/commands/google_search.py +++ b/autogpt/commands/google_search.py @@ -1,6 +1,7 @@ """Google search command for Autogpt.""" +from __future__ import annotations + import json -from typing import List, Union from duckduckgo_search import ddg @@ -33,7 +34,7 @@ def google_search(query: str, num_results: int = 8) -> str: return json.dumps(search_results, ensure_ascii=False, indent=4) -def google_official_search(query: str, num_results: int = 8) -> Union[str, List[str]]: +def google_official_search(query: str, num_results: int = 8) -> str | list[str]: """Return the results of a google search using the official Google API Args: diff --git a/autogpt/commands/improve_code.py b/autogpt/commands/improve_code.py index 05fe89e9ed11..e3440d8b7c6e 100644 --- a/autogpt/commands/improve_code.py +++ b/autogpt/commands/improve_code.py @@ -1,10 +1,11 @@ +from __future__ import annotations + import json -from typing import List from autogpt.llm_utils import call_ai_function -def improve_code(suggestions: List[str], code: str) -> str: +def improve_code(suggestions: list[str], code: str) -> str: """ A function that takes in code and suggestions and returns a response from create chat completion api call. 
diff --git a/autogpt/commands/web_playwright.py b/autogpt/commands/web_playwright.py index 93a46ac9c7df..a1abb6cb73d2 100644 --- a/autogpt/commands/web_playwright.py +++ b/autogpt/commands/web_playwright.py @@ -1,4 +1,6 @@ """Web scraping commands using Playwright""" +from __future__ import annotations + try: from playwright.sync_api import sync_playwright except ImportError: @@ -7,7 +9,6 @@ ) from bs4 import BeautifulSoup from autogpt.processing.html import extract_hyperlinks, format_hyperlinks -from typing import List, Union def scrape_text(url: str) -> str: @@ -45,7 +46,7 @@ def scrape_text(url: str) -> str: return text -def scrape_links(url: str) -> Union[str, List[str]]: +def scrape_links(url: str) -> str | list[str]: """Scrape links from a webpage Args: diff --git a/autogpt/commands/web_requests.py b/autogpt/commands/web_requests.py index a6161ec57d7f..50d8d383cb1a 100644 --- a/autogpt/commands/web_requests.py +++ b/autogpt/commands/web_requests.py @@ -1,5 +1,6 @@ """Browse a webpage and summarize it using the LLM model""" -from typing import List, Tuple, Union +from __future__ import annotations + from urllib.parse import urljoin, urlparse import requests @@ -66,7 +67,7 @@ def check_local_file_access(url: str) -> bool: def get_response( url: str, timeout: int = 10 -) -> Union[Tuple[None, str], Tuple[Response, None]]: +) -> tuple[None, str] | tuple[Response, None]: """Get the response from a URL Args: @@ -74,7 +75,7 @@ def get_response( timeout (int): The timeout for the HTTP request Returns: - Tuple[None, str] | Tuple[Response, None]: The response and error message + tuple[None, str] | tuple[Response, None]: The response and error message Raises: ValueError: If the URL is invalid @@ -136,14 +137,14 @@ def scrape_text(url: str) -> str: return text -def scrape_links(url: str) -> Union[str, List[str]]: +def scrape_links(url: str) -> str | list[str]: """Scrape links from a webpage Args: url (str): The URL to scrape links from Returns: - Union[str, List[str]]: 
The scraped links + str | list[str]: The scraped links """ response, error_message = get_response(url) if error_message: diff --git a/autogpt/commands/web_selenium.py b/autogpt/commands/web_selenium.py index 359803eed3f7..1d078d76d7fe 100644 --- a/autogpt/commands/web_selenium.py +++ b/autogpt/commands/web_selenium.py @@ -1,4 +1,6 @@ """Selenium web scraping module.""" +from __future__ import annotations + from selenium import webdriver from autogpt.processing.html import extract_hyperlinks, format_hyperlinks import autogpt.processing.text as summary @@ -15,13 +17,12 @@ import logging from pathlib import Path from autogpt.config import Config -from typing import List, Tuple, Union FILE_DIR = Path(__file__).parent.parent CFG = Config() -def browse_website(url: str, question: str) -> Tuple[str, WebDriver]: +def browse_website(url: str, question: str) -> tuple[str, WebDriver]: """Browse a website and return the answer and links to the user Args: @@ -43,7 +44,7 @@ def browse_website(url: str, question: str) -> Tuple[str, WebDriver]: return f"Answer gathered from website: {summary_text} \n \n Links: {links}", driver -def scrape_text_with_selenium(url: str) -> Tuple[WebDriver, str]: +def scrape_text_with_selenium(url: str) -> tuple[WebDriver, str]: """Scrape text from a website using selenium Args: @@ -97,7 +98,7 @@ def scrape_text_with_selenium(url: str) -> Tuple[WebDriver, str]: return driver, text -def scrape_links_with_selenium(driver: WebDriver, url: str) -> List[str]: +def scrape_links_with_selenium(driver: WebDriver, url: str) -> list[str]: """Scrape links from a website using selenium Args: diff --git a/autogpt/commands/write_tests.py b/autogpt/commands/write_tests.py index f1d6c9b2ce82..138a1adb6f83 100644 --- a/autogpt/commands/write_tests.py +++ b/autogpt/commands/write_tests.py @@ -1,16 +1,17 @@ """A module that contains a function to generate test cases for the submitted code.""" +from __future__ import annotations + import json -from typing import List from 
autogpt.llm_utils import call_ai_function -def write_tests(code: str, focus: List[str]) -> str: +def write_tests(code: str, focus: list[str]) -> str: """ A function that takes in code and focus topics and returns a response from create chat completion api call. Parameters: - focus (List): A list of suggestions around what needs to be improved. + focus (list): A list of suggestions around what needs to be improved. code (str): Code for test cases to be generated against. Returns: A result string from create chat completion. Test cases for the submitted code diff --git a/autogpt/config/ai_config.py b/autogpt/config/ai_config.py index 014e360f870a..86171357ba0b 100644 --- a/autogpt/config/ai_config.py +++ b/autogpt/config/ai_config.py @@ -2,8 +2,10 @@ """ A module that contains the AIConfig class object that contains the configuration """ +from __future__ import annotations + import os -from typing import List, Optional, Type +from typing import Type import yaml @@ -18,7 +20,7 @@ class AIConfig: """ def __init__( - self, ai_name: str = "", ai_role: str = "", ai_goals: Optional[List] = None + self, ai_name: str = "", ai_role: str = "", ai_goals: list | None = None ) -> None: """ Initialize a class instance diff --git a/autogpt/json_fixes/bracket_termination.py b/autogpt/json_fixes/bracket_termination.py index 692461aad5d0..822eed4a5468 100644 --- a/autogpt/json_fixes/bracket_termination.py +++ b/autogpt/json_fixes/bracket_termination.py @@ -1,7 +1,8 @@ """Fix JSON brackets.""" +from __future__ import annotations + import contextlib import json -from typing import Optional import regex from colorama import Fore @@ -46,7 +47,7 @@ def attempt_to_fix_json_by_finding_outermost_brackets(json_string: str): return json_string -def balance_braces(json_string: str) -> Optional[str]: +def balance_braces(json_string: str) -> str | None: """ Balance the braces in a JSON string. 
diff --git a/autogpt/json_fixes/parsing.py b/autogpt/json_fixes/parsing.py index 26d067939e54..0f15441160c2 100644 --- a/autogpt/json_fixes/parsing.py +++ b/autogpt/json_fixes/parsing.py @@ -1,8 +1,9 @@ """Fix and parse JSON strings.""" +from __future__ import annotations import contextlib import json -from typing import Any, Dict, Union +from typing import Any from autogpt.config import Config from autogpt.json_fixes.auto_fix import fix_json @@ -71,7 +72,7 @@ def correct_json(json_to_load: str) -> str: def fix_and_parse_json( json_to_load: str, try_to_fix_with_gpt: bool = True -) -> Union[str, Dict[Any, Any]]: +) -> str | dict[Any, Any]: """Fix and parse JSON string Args: @@ -80,7 +81,7 @@ def fix_and_parse_json( Defaults to True. Returns: - Union[str, Dict[Any, Any]]: The parsed JSON. + str or dict[Any, Any]: The parsed JSON. """ with contextlib.suppress(json.JSONDecodeError): @@ -109,7 +110,7 @@ def fix_and_parse_json( def try_ai_fix( try_to_fix_with_gpt: bool, exception: Exception, json_to_load: str -) -> Union[str, Dict[Any, Any]]: +) -> str | dict[Any, Any]: """Try to fix the JSON with the AI Args: @@ -121,7 +122,7 @@ def try_ai_fix( exception: If try_to_fix_with_gpt is False. Returns: - Union[str, Dict[Any, Any]]: The JSON string or dictionary. + str or dict[Any, Any]: The JSON string or dictionary. 
""" if not try_to_fix_with_gpt: raise exception diff --git a/autogpt/llm_utils.py b/autogpt/llm_utils.py index 43739009c44a..2075f93446eb 100644 --- a/autogpt/llm_utils.py +++ b/autogpt/llm_utils.py @@ -1,6 +1,7 @@ +from __future__ import annotations + from ast import List import time -from typing import Dict, Optional import openai from openai.error import APIError, RateLimitError @@ -14,7 +15,7 @@ def call_ai_function( - function: str, args: List, description: str, model: Optional[str] = None + function: str, args: list, description: str, model: str | None = None ) -> str: """Call an AI function @@ -51,15 +52,15 @@ def call_ai_function( # Overly simple abstraction until we create something better # simple retry mechanism when getting a rate error or a bad gateway def create_chat_completion( - messages: List, # type: ignore - model: Optional[str] = None, + messages: list, # type: ignore + model: str | None = None, temperature: float = CFG.temperature, - max_tokens: Optional[int] = None, + max_tokens: int | None = None, ) -> str: """Create a chat completion using the OpenAI API Args: - messages (List[Dict[str, str]]): The messages to send to the chat completion + messages (list[dict[str, str]]): The messages to send to the chat completion model (str, optional): The model to use. Defaults to None. temperature (float, optional): The temperature to use. Defaults to 0.9. max_tokens (int, optional): The max tokens to use. Defaults to None. 
diff --git a/autogpt/memory/local.py b/autogpt/memory/local.py index 004153c101fb..6c7ee1b36a2f 100644 --- a/autogpt/memory/local.py +++ b/autogpt/memory/local.py @@ -1,6 +1,8 @@ +from __future__ import annotations + import dataclasses import os -from typing import Any, List, Optional, Tuple +from typing import Any import numpy as np import orjson @@ -97,7 +99,7 @@ def clear(self) -> str: self.data = CacheContent() return "Obliviated" - def get(self, data: str) -> Optional[List[Any]]: + def get(self, data: str) -> list[Any] | None: """ Gets the data from the memory that is most relevant to the given data. @@ -108,7 +110,7 @@ def get(self, data: str) -> Optional[List[Any]]: """ return self.get_relevant(data, 1) - def get_relevant(self, text: str, k: int) -> List[Any]: + def get_relevant(self, text: str, k: int) -> list[Any]: """ " matrix-vector mult to find score-for-each-row-of-matrix get indices for top-k winning scores @@ -127,7 +129,7 @@ def get_relevant(self, text: str, k: int) -> List[Any]: return [self.data.texts[i] for i in top_k_indices] - def get_stats(self) -> Tuple[int, Tuple[int, ...]]: + def get_stats(self) -> tuple[int, tuple[int, ...]]: """ Returns: The stats of the local cache. """ diff --git a/autogpt/memory/no_memory.py b/autogpt/memory/no_memory.py index 0a976690536e..4035a657f0e6 100644 --- a/autogpt/memory/no_memory.py +++ b/autogpt/memory/no_memory.py @@ -1,5 +1,7 @@ """A class that does not store any data. This is the default memory provider.""" -from typing import Optional, List, Any +from __future__ import annotations + +from typing import Any from autogpt.memory.base import MemoryProviderSingleton @@ -31,7 +33,7 @@ def add(self, data: str) -> str: """ return "" - def get(self, data: str) -> Optional[List[Any]]: + def get(self, data: str) -> list[Any] | None: """ Gets the data from the memory that is most relevant to the given data. NoMemory always returns None. 
@@ -51,7 +53,7 @@ def clear(self) -> str: """ return "" - def get_relevant(self, data: str, num_relevant: int = 5) -> Optional[List[Any]]: + def get_relevant(self, data: str, num_relevant: int = 5) -> list[Any] | None: """ Returns all the data in the memory that is relevant to the given data. NoMemory always returns None. diff --git a/autogpt/memory/redismem.py b/autogpt/memory/redismem.py index 4d73b7411269..0e8dd71d9165 100644 --- a/autogpt/memory/redismem.py +++ b/autogpt/memory/redismem.py @@ -1,5 +1,7 @@ """Redis memory provider.""" -from typing import Any, List, Optional +from __future__ import annotations + +from typing import Any import numpy as np import redis @@ -99,7 +101,7 @@ def add(self, data: str) -> str: pipe.execute() return _text - def get(self, data: str) -> Optional[List[Any]]: + def get(self, data: str) -> list[Any] | None: """ Gets the data from the memory that is most relevant to the given data. @@ -119,7 +121,7 @@ def clear(self) -> str: self.redis.flushall() return "Obliviated" - def get_relevant(self, data: str, num_relevant: int = 5) -> Optional[List[Any]]: + def get_relevant(self, data: str, num_relevant: int = 5) -> list[Any] | None: """ Returns all the data in the memory that is relevant to the given data. 
Args: diff --git a/autogpt/processing/html.py b/autogpt/processing/html.py index c43a0b74e8aa..e1912b6ad42c 100644 --- a/autogpt/processing/html.py +++ b/autogpt/processing/html.py @@ -1,10 +1,11 @@ """HTML processing functions""" +from __future__ import annotations + from requests.compat import urljoin -from typing import List, Tuple from bs4 import BeautifulSoup -def extract_hyperlinks(soup: BeautifulSoup, base_url: str) -> List[Tuple[str, str]]: +def extract_hyperlinks(soup: BeautifulSoup, base_url: str) -> list[tuple[str, str]]: """Extract hyperlinks from a BeautifulSoup object Args: @@ -20,7 +21,7 @@ def extract_hyperlinks(soup: BeautifulSoup, base_url: str) -> List[Tuple[str, st ] -def format_hyperlinks(hyperlinks: List[Tuple[str, str]]) -> List[str]: +def format_hyperlinks(hyperlinks: list[tuple[str, str]]) -> list[str]: """Format hyperlinks to be displayed to the user Args: diff --git a/autogpt/promptgenerator.py b/autogpt/promptgenerator.py index 4f5186150ad2..0ad7046a0c41 100644 --- a/autogpt/promptgenerator.py +++ b/autogpt/promptgenerator.py @@ -1,6 +1,8 @@ """ A module for generating custom prompt strings.""" +from __future__ import annotations + import json -from typing import Any, Dict, List +from typing import Any class PromptGenerator: @@ -61,7 +63,7 @@ def add_command(self, command_label: str, command_name: str, args=None) -> None: self.commands.append(command) - def _generate_command_string(self, command: Dict[str, Any]) -> str: + def _generate_command_string(self, command: dict[str, Any]) -> str: """ Generate a formatted string representation of a command. @@ -94,7 +96,7 @@ def add_performance_evaluation(self, evaluation: str) -> None: """ self.performance_evaluation.append(evaluation) - def _generate_numbered_list(self, items: List[Any], item_type="list") -> str: + def _generate_numbered_list(self, items: list[Any], item_type="list") -> str: """ Generate a numbered list from given items based on the item_type. 
diff --git a/autogpt/token_counter.py b/autogpt/token_counter.py index a85a54be0e11..338fe6be4d47 100644 --- a/autogpt/token_counter.py +++ b/autogpt/token_counter.py @@ -1,5 +1,5 @@ """Functions for counting the number of tokens in a message or string.""" -from typing import Dict, List +from __future__ import annotations import tiktoken @@ -7,7 +7,7 @@ def count_message_tokens( - messages: List[Dict[str, str]], model: str = "gpt-3.5-turbo-0301" + messages: list[dict[str, str]], model: str = "gpt-3.5-turbo-0301" ) -> int: """ Returns the number of tokens used by a list of messages. From 1df47bb0be87bbda9b794226ceb4a2eef47ad45b Mon Sep 17 00:00:00 2001 From: BillSchumacher <34168009+BillSchumacher@users.noreply.github.com> Date: Sun, 16 Apr 2023 13:08:16 -0500 Subject: [PATCH 2/2] Add in one more place. --- autogpt/workspace.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/autogpt/workspace.py b/autogpt/workspace.py index 7913491906e8..2706b3b2db48 100644 --- a/autogpt/workspace.py +++ b/autogpt/workspace.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os from pathlib import Path