Merge pull request #1968 from jayceslesar/fix/type-annotations
unify annotations to future syntax
BillSchumacher authored Apr 16, 2023
2 parents 13602b4 + 1df47bb commit 5802f17
Showing 20 changed files with 86 additions and 60 deletions.
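
The whole PR applies one mechanical pattern: add "from __future__ import annotations" (PEP 563) at the top of each module, then replace typing.List / Tuple / Dict / Union / Optional in annotations with built-in generics (PEP 585) and | unions (PEP 604). Below is a minimal, self-contained sketch of that pattern; the function names are illustrative and not taken from the diff.

# A minimal sketch of the annotation style adopted across this PR; the
# function names here are illustrative, not taken from the diff.
from __future__ import annotations  # PEP 563: annotations are stored as strings


def list_agents() -> list[tuple[str | int, str]]:
    # Built-in generics (PEP 585) and | unions (PEP 604) in the annotation
    # parse fine on Python 3.7+ because the annotation is never evaluated here.
    return [(0, "demo task"), ("browser", "scrape a page")]


def message_agent(key: str | int, message: str) -> str | None:
    # str | None replaces Optional[str]; str | int replaces Union[str, int].
    return f"agent {key} received: {message}"


if __name__ == "__main__":
    print(list_agents())
    print(message_agent(0, "hello"))
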
9 changes: 5 additions & 4 deletions autogpt/agent/agent_manager.py
@@ -1,5 +1,6 @@
"""Agent manager for managing GPT agents"""
from typing import List, Tuple, Union
from __future__ import annotations

from autogpt.llm_utils import create_chat_completion
from autogpt.config.config import Singleton

@@ -14,7 +15,7 @@ def __init__(self):
# Create new GPT agent
# TODO: Centralise use of create_chat_completion() to globally enforce token limit

def create_agent(self, task: str, prompt: str, model: str) -> Tuple[int, str]:
def create_agent(self, task: str, prompt: str, model: str) -> tuple[int, str]:
"""Create a new agent and return its key
Args:
@@ -47,7 +48,7 @@ def create_agent(self, task: str, prompt: str, model: str) -> Tuple[int, str]:

return key, agent_reply

def message_agent(self, key: Union[str, int], message: str) -> str:
def message_agent(self, key: str | int, message: str) -> str:
"""Send a message to an agent and return its response
Args:
@@ -73,7 +74,7 @@ def message_agent(self, key: Union[str, int], message: str) -> str:

return agent_reply

def list_agents(self) -> List[Tuple[Union[str, int], str]]:
def list_agents(self) -> list[tuple[str | int, str]]:
"""Return a list of all agents
Returns:
4 changes: 2 additions & 2 deletions autogpt/commands/evaluate_code.py
@@ -1,10 +1,10 @@
"""Code evaluation module."""
from typing import List
from __future__ import annotations

from autogpt.llm_utils import call_ai_function


def evaluate_code(code: str) -> List[str]:
def evaluate_code(code: str) -> list[str]:
"""
A function that takes in a string and returns a response from create chat
completion api call.
9 changes: 6 additions & 3 deletions autogpt/commands/file_operations.py
@@ -1,8 +1,11 @@
"""File operations for AutoGPT"""
from __future__ import annotations

import os
import os.path
from pathlib import Path
from typing import Generator
from autogpt.workspace import path_in_workspace, WORKSPACE_PATH
from typing import Generator, List

LOG_FILE = "file_logger.txt"
LOG_FILE_PATH = WORKSPACE_PATH / LOG_FILE
@@ -187,14 +190,14 @@ def delete_file(filename: str) -> str:
return f"Error: {str(e)}"


def search_files(directory: str) -> List[str]:
def search_files(directory: str) -> list[str]:
"""Search for files in a directory
Args:
directory (str): The directory to search in
Returns:
List[str]: A list of files found in the directory
list[str]: A list of files found in the directory
"""
found_files = []

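
Note that the future import only changes how annotations are stored, not which names exist at runtime. That is presumably why imports backing the annotations, such as typing.Generator in file_operations.py, are kept: tools that evaluate the stored strings (for example typing.get_type_hints) can then still resolve them. A small sketch under that assumption; split_chunks is an illustrative name, not a function from the diff.

# Sketch: with the future import, annotations are lazy strings, but they only
# stay introspectable if the names they reference are still importable.
from __future__ import annotations

import typing
from typing import Generator


def split_chunks(text: str, size: int = 4) -> Generator[str, None, None]:
    # Generator must remain imported so get_type_hints() can resolve the stored
    # string "Generator[str, None, None]" back into a real typing object.
    for i in range(0, len(text), size):
        yield text[i : i + size]


if __name__ == "__main__":
    print(list(split_chunks("abcdefgh")))       # ['abcd', 'efgh']
    print(typing.get_type_hints(split_chunks))  # resolves because Generator is imported
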
5 changes: 3 additions & 2 deletions autogpt/commands/google_search.py
@@ -1,6 +1,7 @@
"""Google search command for Autogpt."""
from __future__ import annotations

import json
from typing import List, Union

from duckduckgo_search import ddg

@@ -33,7 +34,7 @@ def google_search(query: str, num_results: int = 8) -> str:
return json.dumps(search_results, ensure_ascii=False, indent=4)


def google_official_search(query: str, num_results: int = 8) -> Union[str, List[str]]:
def google_official_search(query: str, num_results: int = 8) -> str | list[str]:
"""Return the results of a google search using the official Google API
Args:
5 changes: 3 additions & 2 deletions autogpt/commands/improve_code.py
@@ -1,10 +1,11 @@
from __future__ import annotations

import json
from typing import List

from autogpt.llm_utils import call_ai_function


def improve_code(suggestions: List[str], code: str) -> str:
def improve_code(suggestions: list[str], code: str) -> str:
"""
A function that takes in code and suggestions and returns a response from create
chat completion api call.
5 changes: 3 additions & 2 deletions autogpt/commands/web_playwright.py
@@ -1,4 +1,6 @@
"""Web scraping commands using Playwright"""
from __future__ import annotations

try:
from playwright.sync_api import sync_playwright
except ImportError:
@@ -7,7 +9,6 @@
)
from bs4 import BeautifulSoup
from autogpt.processing.html import extract_hyperlinks, format_hyperlinks
from typing import List, Union


def scrape_text(url: str) -> str:
@@ -45,7 +46,7 @@ def scrape_text(url: str) -> str:
return text


def scrape_links(url: str) -> Union[str, List[str]]:
def scrape_links(url: str) -> str | list[str]:
"""Scrape links from a webpage
Args:
11 changes: 6 additions & 5 deletions autogpt/commands/web_requests.py
@@ -1,5 +1,6 @@
"""Browse a webpage and summarize it using the LLM model"""
from typing import List, Tuple, Union
from __future__ import annotations

from urllib.parse import urljoin, urlparse

import requests
@@ -66,15 +67,15 @@ def check_local_file_access(url: str) -> bool:

def get_response(
url: str, timeout: int = 10
) -> Union[Tuple[None, str], Tuple[Response, None]]:
) -> tuple[None, str] | tuple[Response, None]:
"""Get the response from a URL
Args:
url (str): The URL to get the response from
timeout (int): The timeout for the HTTP request
Returns:
Tuple[None, str] | Tuple[Response, None]: The response and error message
tuple[None, str] | tuple[Response, None]: The response and error message
Raises:
ValueError: If the URL is invalid
@@ -136,14 +137,14 @@ def scrape_text(url: str) -> str:
return text


def scrape_links(url: str) -> Union[str, List[str]]:
def scrape_links(url: str) -> str | list[str]:
"""Scrape links from a webpage
Args:
url (str): The URL to scrape links from
Returns:
Union[str, List[str]]: The scraped links
str | list[str]: The scraped links
"""
response, error_message = get_response(url)
if error_message:
9 changes: 5 additions & 4 deletions autogpt/commands/web_selenium.py
@@ -1,4 +1,6 @@
"""Selenium web scraping module."""
from __future__ import annotations

from selenium import webdriver
from autogpt.processing.html import extract_hyperlinks, format_hyperlinks
import autogpt.processing.text as summary
@@ -15,13 +17,12 @@
import logging
from pathlib import Path
from autogpt.config import Config
from typing import List, Tuple, Union

FILE_DIR = Path(__file__).parent.parent
CFG = Config()


def browse_website(url: str, question: str) -> Tuple[str, WebDriver]:
def browse_website(url: str, question: str) -> tuple[str, WebDriver]:
"""Browse a website and return the answer and links to the user
Args:
@@ -43,7 +44,7 @@ def browse_website(url: str, question: str) -> Tuple[str, WebDriver]:
return f"Answer gathered from website: {summary_text} \n \n Links: {links}", driver


def scrape_text_with_selenium(url: str) -> Tuple[WebDriver, str]:
def scrape_text_with_selenium(url: str) -> tuple[WebDriver, str]:
"""Scrape text from a website using selenium
Args:
@@ -97,7 +98,7 @@ def scrape_text_with_selenium(url: str) -> Tuple[WebDriver, str]:
return driver, text


def scrape_links_with_selenium(driver: WebDriver, url: str) -> List[str]:
def scrape_links_with_selenium(driver: WebDriver, url: str) -> list[str]:
"""Scrape links from a website using selenium
Args:
7 changes: 4 additions & 3 deletions autogpt/commands/write_tests.py
@@ -1,16 +1,17 @@
"""A module that contains a function to generate test cases for the submitted code."""
from __future__ import annotations

import json
from typing import List
from autogpt.llm_utils import call_ai_function


def write_tests(code: str, focus: List[str]) -> str:
def write_tests(code: str, focus: list[str]) -> str:
"""
A function that takes in code and focus topics and returns a response from create
chat completion api call.
Parameters:
focus (List): A list of suggestions around what needs to be improved.
focus (list): A list of suggestions around what needs to be improved.
code (str): Code for test cases to be generated against.
Returns:
A result string from create chat completion. Test cases for the submitted code
6 changes: 4 additions & 2 deletions autogpt/config/ai_config.py
@@ -2,8 +2,10 @@
"""
A module that contains the AIConfig class object that contains the configuration
"""
from __future__ import annotations

import os
from typing import List, Optional, Type
from typing import Type
import yaml


@@ -18,7 +20,7 @@ class AIConfig:
"""

def __init__(
self, ai_name: str = "", ai_role: str = "", ai_goals: Optional[List] = None
self, ai_name: str = "", ai_role: str = "", ai_goals: list | None = None
) -> None:
"""
Initialize a class instance
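
The AIConfig change also shows the optional-argument form of the new syntax: Optional[List] = None becomes list | None = None, with the usual None check inside __init__ to avoid a shared mutable default. A hedged sketch of that pattern; DemoConfig is illustrative and only loosely mirrors the class in the diff.

# Sketch of the optional-argument pattern: "list | None = None" replaces
# "Optional[List] = None". DemoConfig is illustrative, not the real AIConfig.
from __future__ import annotations


class DemoConfig:
    def __init__(self, ai_name: str = "", ai_goals: list | None = None) -> None:
        # None sentinel avoids a shared mutable default list.
        self.ai_name = ai_name
        self.ai_goals = ai_goals if ai_goals is not None else []


if __name__ == "__main__":
    cfg = DemoConfig("Entrepreneur-GPT", ["increase net worth"])
    print(cfg.ai_name, cfg.ai_goals)
    print(DemoConfig().ai_goals)  # [] each time, not a shared list
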
5 changes: 3 additions & 2 deletions autogpt/json_fixes/bracket_termination.py
@@ -1,7 +1,8 @@
"""Fix JSON brackets."""
from __future__ import annotations

import contextlib
import json
from typing import Optional
import regex
from colorama import Fore

@@ -46,7 +47,7 @@ def attempt_to_fix_json_by_finding_outermost_brackets(json_string: str):
return json_string


def balance_braces(json_string: str) -> Optional[str]:
def balance_braces(json_string: str) -> str | None:
"""
Balance the braces in a JSON string.
11 changes: 6 additions & 5 deletions autogpt/json_fixes/parsing.py
@@ -1,8 +1,9 @@
"""Fix and parse JSON strings."""
from __future__ import annotations

import contextlib
import json
from typing import Any, Dict, Union
from typing import Any

from autogpt.config import Config
from autogpt.json_fixes.auto_fix import fix_json
@@ -71,7 +72,7 @@ def correct_json(json_to_load: str) -> str:

def fix_and_parse_json(
json_to_load: str, try_to_fix_with_gpt: bool = True
) -> Union[str, Dict[Any, Any]]:
) -> str | dict[Any, Any]:
"""Fix and parse JSON string
Args:
@@ -80,7 +81,7 @@ def fix_and_parse_json(
Defaults to True.
Returns:
Union[str, Dict[Any, Any]]: The parsed JSON.
str or dict[Any, Any]: The parsed JSON.
"""

with contextlib.suppress(json.JSONDecodeError):
@@ -109,7 +110,7 @@ def fix_and_parse_json(

def try_ai_fix(
try_to_fix_with_gpt: bool, exception: Exception, json_to_load: str
) -> Union[str, Dict[Any, Any]]:
) -> str | dict[Any, Any]:
"""Try to fix the JSON with the AI
Args:
@@ -121,7 +122,7 @@ def try_ai_fix(
exception: If try_to_fix_with_gpt is False.
Returns:
Union[str, Dict[Any, Any]]: The JSON string or dictionary.
str or dict[Any, Any]: The JSON string or dictionary.
"""
if not try_to_fix_with_gpt:
raise exception
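
One caveat with this style is worth keeping in mind: the | and list[...] syntax is only deferred inside annotations. Evaluating the same expression at runtime, for example to build a type alias or something passed to isinstance, still raises TypeError on interpreters older than 3.10. A small sketch; fix_and_parse is an illustrative stand-in, not the fix_and_parse_json function from the diff.

# Sketch of the main caveat: the new syntax is deferred inside annotations,
# but evaluated eagerly everywhere else, so runtime uses still fail before
# Python 3.10. fix_and_parse is illustrative, not the function from the diff.
from __future__ import annotations

import sys
from typing import Any


def fix_and_parse(json_to_load: str) -> str | dict[Any, Any]:  # fine: never evaluated
    return {"parsed": json_to_load}


if __name__ == "__main__":
    print(fix_and_parse("{}"))
    if sys.version_info < (3, 10):
        try:
            JSONResult = str | dict[Any, Any]  # evaluated eagerly at runtime
        except TypeError as exc:
            print(f"runtime union not supported on this interpreter: {exc}")
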
13 changes: 7 additions & 6 deletions autogpt/llm_utils.py
@@ -1,6 +1,7 @@
from __future__ import annotations

from ast import List
import time
from typing import Dict, Optional

import openai
from openai.error import APIError, RateLimitError
@@ -14,7 +15,7 @@


def call_ai_function(
function: str, args: List, description: str, model: Optional[str] = None
function: str, args: list, description: str, model: str | None = None
) -> str:
"""Call an AI function
@@ -51,15 +52,15 @@ def call_ai_function(
# Overly simple abstraction until we create something better
# simple retry mechanism when getting a rate error or a bad gateway
def create_chat_completion(
messages: List, # type: ignore
model: Optional[str] = None,
messages: list, # type: ignore
model: str | None = None,
temperature: float = CFG.temperature,
max_tokens: Optional[int] = None,
max_tokens: int | None = None,
) -> str:
"""Create a chat completion using the OpenAI API
Args:
messages (List[Dict[str, str]]): The messages to send to the chat completion
messages (list[dict[str, str]]): The messages to send to the chat completion
model (str, optional): The model to use. Defaults to None.
temperature (float, optional): The temperature to use. Defaults to 0.9.
max_tokens (int, optional): The max tokens to use. Defaults to None.
10 changes: 6 additions & 4 deletions autogpt/memory/local.py
@@ -1,6 +1,8 @@
from __future__ import annotations

import dataclasses
import os
from typing import Any, List, Optional, Tuple
from typing import Any

import numpy as np
import orjson
@@ -97,7 +99,7 @@ def clear(self) -> str:
self.data = CacheContent()
return "Obliviated"

def get(self, data: str) -> Optional[List[Any]]:
def get(self, data: str) -> list[Any] | None:
"""
Gets the data from the memory that is most relevant to the given data.
@@ -108,7 +110,7 @@ def get(self, data: str) -> Optional[List[Any]]:
"""
return self.get_relevant(data, 1)

def get_relevant(self, text: str, k: int) -> List[Any]:
def get_relevant(self, text: str, k: int) -> list[Any]:
""" "
matrix-vector mult to find score-for-each-row-of-matrix
get indices for top-k winning scores
@@ -127,7 +129,7 @@ def get_relevant(self, text: str, k: int) -> List[Any]:

return [self.data.texts[i] for i in top_k_indices]

def get_stats(self) -> Tuple[int, Tuple[int, ...]]:
def get_stats(self) -> tuple[int, tuple[int, ...]]:
"""
Returns: The stats of the local cache.
"""
(The remaining 6 of the 20 changed files are not shown on this page.)
