diff --git a/config b/config index 7871531..5af2578 100644 --- a/config +++ b/config @@ -1,7 +1,7 @@ # OpenAI stuff KEYFILE="./openai_key.txt" MODEL_NAME="lunademo" -LOCAL_AI_URL="http://localhost:8080/v1" +LOCAL_AI_URL="http://172.29.221.52:5000/v1/" # General INTERACTIVE="no" diff --git a/examples/webgpt.py b/examples/webgpt.py index 9861de0..36b5d5f 100644 --- a/examples/webgpt.py +++ b/examples/webgpt.py @@ -1,6 +1,7 @@ """ WebGPT is an AI driven enterprise that develop website for its clients. """ + import os import openai from gpt_enterprise.gpt_utils import ( diff --git a/python/gpt_enterprise/__init__.py b/python/gpt_enterprise/__init__.py index b2f390e..178a562 100644 --- a/python/gpt_enterprise/__init__.py +++ b/python/gpt_enterprise/__init__.py @@ -13,4 +13,5 @@ The scrum master will then create a sequence of tasks, each associated to an employee previously hired by the team leader.\n At the end, all tasks result will be returned to the CEO.\n """ + from .__main__ import main diff --git a/python/gpt_enterprise/__main__.py b/python/gpt_enterprise/__main__.py index 53b2d38..06e7489 100644 --- a/python/gpt_enterprise/__main__.py +++ b/python/gpt_enterprise/__main__.py @@ -1,6 +1,7 @@ """ GPTenterprise is an AI driven enterprise. 
""" + import os import sys import asyncio @@ -23,10 +24,8 @@ def main(): if local_ai_url: # Change api url to LoacalAi one - openai.api_base = local_ai_url + openai.base_url = local_ai_url openai.api_key = "sx-xxx" - OPENAI_API_KEY = "sx-xxx" - os.environ['OPENAI_API_KEY'] = OPENAI_API_KEY else: # Initialize openai api_key with open("./openai_key.txt", "r") as file: diff --git a/python/gpt_enterprise/employee.py b/python/gpt_enterprise/employee.py index 65e5e6b..c8b1f67 100644 --- a/python/gpt_enterprise/employee.py +++ b/python/gpt_enterprise/employee.py @@ -3,6 +3,7 @@ Employee \U0001F469 """ + import os from typing import List, Tuple diff --git a/python/gpt_enterprise/enterprise.py b/python/gpt_enterprise/enterprise.py index 31750db..21dae81 100644 --- a/python/gpt_enterprise/enterprise.py +++ b/python/gpt_enterprise/enterprise.py @@ -3,6 +3,7 @@ Enterprise \U0001F3E2 """ + import os from gpt_enterprise.team_leader import TeamLeader diff --git a/python/gpt_enterprise/gpt_utils.py b/python/gpt_enterprise/gpt_utils.py index 5473576..e321718 100644 --- a/python/gpt_enterprise/gpt_utils.py +++ b/python/gpt_enterprise/gpt_utils.py @@ -3,6 +3,7 @@ GPT utils \U0001F9E0 """ + import os import openai import requests @@ -13,7 +14,10 @@ def generate_text( - system_prompt: str, user_prompt: str, temperature: float, model: str = os.getenv("MODEL_NAME", "gpt-3.5-turbo-16k") + system_prompt: str, + user_prompt: str, + temperature: float, + model: str = os.getenv("MODEL_NAME", "gpt-3.5-turbo-16k"), ) -> Generator: """ @@ -27,7 +31,7 @@ def generate_text( Returns: Generator: GPT response object """ - response = openai.ChatCompletion.create( + response = openai.chat.completions.create( model=model, messages=[ # Initialize GPT with system prompt @@ -67,39 +71,44 @@ def generate_image( """ # Ask ChatGPT a prompt to generate image with DALL-E with open(os.path.join(EMPLOYEE_PROMPTS_PATH, "dall_e_prompter.txt"), "r") as file: - response = openai.ChatCompletion.create( + response = 
openai.chat.completions.create( model=os.getenv("MODEL_NAME", "gpt-3.5-turbo-16k"), messages=[ # Initialize ChatGPT to be a helpful assistant but that it remains the employee { "role": "system", - "content": f"{file.read()}" - + f" You are also {system_prompt} But keep in mind that {file.read()}" - if system_prompt - else "", + "content": ( + f"{file.read()}" + + f" You are also {system_prompt} But keep in mind that {file.read()}" + if system_prompt + else "" + ), }, # Generate a subject {"role": "user", "content": f"SUBJECT {user_prompt}"}, ], ) - # Create images, troncate prompt to 70 characters - # to be sure it will be accepted by DALL-E - image_response = openai.Image.create( - prompt=response.choices[0].message.content[:70], - n=nb_image, - size="1024x1024", - ) - generated_image_names = [] - # Download images - for index, image in enumerate(image_response["data"]): - img_data = requests.get(image["url"]).content - img_name = f"{base_name}_{index}.jpg" - img_path = os.path.join(output_directory, img_name) - with open(img_path, "wb") as handler: - handler.write(img_data) - generated_image_names.append(f"./{img_name}") + generated_image_names = [] + try: + # Create images, truncate prompt to 70 characters + # to be sure it will be accepted by DALL-E + image_response = openai.Image.create( + prompt=response.choices[0].message.content[:70], + n=nb_image, + size="1024x1024", + ) + # Download images + for index, image in enumerate(image_response["data"]): + img_data = requests.get(image["url"]).content + img_name = f"{base_name}_{index}.jpg" + img_path = os.path.join(output_directory, img_name) + with open(img_path, "wb") as handler: + handler.write(img_data) + generated_image_names.append(f"./{img_name}") + except Exception as error: + print(error) return response.choices[0].message.content, generated_image_names diff --git a/python/gpt_enterprise/scrum_master.py b/python/gpt_enterprise/scrum_master.py index 5f0a800..f25cdd3 100644 --- a/python/gpt_enterprise/scrum_master.py +++ 
b/python/gpt_enterprise/scrum_master.py @@ -3,6 +3,7 @@ Scrum Master \U0001F3C2 """ + import os import ast import time @@ -41,9 +42,11 @@ """ with open( os.path.join( - MANAGER_PROMPTS_PATH - if not os.getenv("CUSTOM_MANAGER_PROMPTS_PATH") - else os.getenv("CUSTOM_MANAGER_PROMPTS_PATH"), + ( + MANAGER_PROMPTS_PATH + if not os.getenv("CUSTOM_MANAGER_PROMPTS_PATH") + else os.getenv("CUSTOM_MANAGER_PROMPTS_PATH") + ), "scrum_master.txt", ), "r", diff --git a/python/gpt_enterprise/team_leader.py b/python/gpt_enterprise/team_leader.py index 5c011b3..8772528 100644 --- a/python/gpt_enterprise/team_leader.py +++ b/python/gpt_enterprise/team_leader.py @@ -3,6 +3,7 @@ Team leader \U0001F57A """ + import os import ast import json @@ -37,9 +38,11 @@ """ with open( os.path.join( - MANAGER_PROMPTS_PATH - if not os.getenv("CUSTOM_MANAGER_PROMPTS_PATH") - else os.getenv("CUSTOM_MANAGER_PROMPTS_PATH"), + ( + MANAGER_PROMPTS_PATH + if not os.getenv("CUSTOM_MANAGER_PROMPTS_PATH") + else os.getenv("CUSTOM_MANAGER_PROMPTS_PATH") + ), "team_leader.txt", ), "r", diff --git a/python/pyproject.toml b/python/pyproject.toml new file mode 100644 index 0000000..4d8502b --- /dev/null +++ b/python/pyproject.toml @@ -0,0 +1,49 @@ +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[project] +name = "GPTenterprise" +version = "0.1.0b1" +authors = [ +    {name = "nervousapps (Achille Pénet)", email = "achille.penet@icloud.com"} +] +description = "Emulating an enterprise with OpenAI GPT." 
+classifiers = [ + "License :: OSI Approved :: MIT License", +] +readme = "README.md" +dependencies = [ + "python-dotenv", + "openai", + "requests", +] +requires-python=">= 3.9" + +[project.urls] +Source = "https://github.com/nervousapps/GPTenterprise" +Documentation = "https://nervousapps.github.io/GPTenterprise" + +[project.optional-dependencies] +test = [ + "pytest", + "pytest-mock", + "pytest-cov", + "pytest-asyncio", +] +doc = ["black", "pdoc"] + +[tool.setuptools] +include-package-data = true + +[tool.setuptools.packages.find] +include = ['gpt_enterprise*'] +exclude = ['tests'] + +[tool.setuptools.package-data] +"gpt_enterprise.prompts" = ["*.txt"] + +[project.scripts] +GPTenterprise = "gpt_enterprise:main" + + diff --git a/python/requirements-docs.in b/python/requirements-docs.in deleted file mode 100644 index 6fd73b1..0000000 --- a/python/requirements-docs.in +++ /dev/null @@ -1,2 +0,0 @@ -black -pdoc \ No newline at end of file diff --git a/python/requirements-docs.txt b/python/requirements-docs.txt index 957281a..0322f63 100644 --- a/python/requirements-docs.txt +++ b/python/requirements-docs.txt @@ -1,28 +1,84 @@ # -# This file is autogenerated by pip-compile with Python 3.11 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --output-file=./python/requirements-docs.txt ./python/requirements-docs.in +# pip-compile --extra=doc --output-file=./python/requirements-docs.txt ./python/pyproject.toml # -black==23.3.0 - # via -r ./python/requirements-docs.in -click==8.1.3 +annotated-types==0.6.0 + # via pydantic +anyio==4.2.0 + # via + # httpx + # openai +black==23.12.1 + # via GPTenterprise (python/pyproject.toml) +certifi==2023.11.17 + # via + # httpcore + # httpx + # requests +charset-normalizer==3.3.2 + # via requests +click==8.1.7 # via black -jinja2==3.1.2 +distro==1.9.0 + # via openai +exceptiongroup==1.2.0 + # via anyio +h11==0.14.0 + # via httpcore +httpcore==1.0.2 + # via httpx +httpx==0.26.0 + # 
via openai +idna==3.6 + # via + # anyio + # httpx + # requests +jinja2==3.1.3 # via pdoc -markupsafe==2.1.2 +markupsafe==2.1.3 # via # jinja2 # pdoc mypy-extensions==1.0.0 # via black -packaging==23.1 +openai==1.7.2 + # via GPTenterprise (python/pyproject.toml) +packaging==23.2 # via black -pathspec==0.11.1 +pathspec==0.12.1 # via black -pdoc==13.1.0 - # via -r ./python/requirements-docs.in -platformdirs==3.2.0 +pdoc==14.3.0 + # via GPTenterprise (python/pyproject.toml) +platformdirs==4.1.0 # via black -pygments==2.15.1 +pydantic==2.5.3 + # via openai +pydantic-core==2.14.6 + # via pydantic +pygments==2.17.2 # via pdoc +python-dotenv==1.0.0 + # via GPTenterprise (python/pyproject.toml) +requests==2.31.0 + # via GPTenterprise (python/pyproject.toml) +sniffio==1.3.0 + # via + # anyio + # httpx + # openai +tomli==2.0.1 + # via black +tqdm==4.66.1 + # via openai +typing-extensions==4.9.0 + # via + # anyio + # black + # openai + # pydantic + # pydantic-core +urllib3==2.2.1 + # via requests diff --git a/python/requirements-tests.in b/python/requirements-tests.in deleted file mode 100644 index 235207b..0000000 --- a/python/requirements-tests.in +++ /dev/null @@ -1,5 +0,0 @@ --rrequirements.txt -pytest -pytest-mock -pytest-cov -pytest-asyncio \ No newline at end of file diff --git a/python/requirements-tests.txt b/python/requirements-tests.txt index 46beee1..31559c2 100644 --- a/python/requirements-tests.txt +++ b/python/requirements-tests.txt @@ -1,86 +1,85 @@ # -# This file is autogenerated by pip-compile with Python 3.11 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --output-file=./python/requirements-tests.txt ./python/requirements-tests.in +# pip-compile --extra=test --output-file=./python/requirements-tests.txt ./python/pyproject.toml # -aiohttp==3.8.4 +annotated-types==0.6.0 + # via pydantic +anyio==4.2.0 # via - # -r ./python/requirements.txt + # httpx # openai -aiosignal==1.3.1 +certifi==2023.11.17 # via 
- # -r ./python/requirements.txt - # aiohttp -async-timeout==4.0.2 - # via - # -r ./python/requirements.txt - # aiohttp -attrs==23.1.0 - # via - # -r ./python/requirements.txt - # aiohttp -certifi==2022.12.7 - # via - # -r ./python/requirements.txt - # requests -charset-normalizer==3.1.0 - # via - # -r ./python/requirements.txt - # aiohttp + # httpcore + # httpx # requests -coverage[toml]==7.2.3 +charset-normalizer==3.3.2 + # via requests +coverage[toml]==7.4.0 # via pytest-cov -frozenlist==1.3.3 +distro==1.9.0 + # via openai +exceptiongroup==1.2.0 # via - # -r ./python/requirements.txt - # aiohttp - # aiosignal -idna==3.4 + # anyio + # pytest +h11==0.14.0 + # via httpcore +httpcore==1.0.2 + # via httpx +httpx==0.26.0 + # via openai +idna==3.6 # via - # -r ./python/requirements.txt + # anyio + # httpx # requests - # yarl iniconfig==2.0.0 # via pytest -multidict==6.0.4 - # via - # -r ./python/requirements.txt - # aiohttp - # yarl -openai==0.27.4 - # via -r ./python/requirements.txt -packaging==23.1 +openai==1.7.2 + # via GPTenterprise (python/pyproject.toml) +packaging==23.2 # via pytest -pluggy==1.0.0 +pluggy==1.3.0 # via pytest -pytest==7.3.1 +pydantic==2.5.3 + # via openai +pydantic-core==2.14.6 + # via pydantic +pytest==7.4.4 # via - # -r ./python/requirements-tests.in + # GPTenterprise (python/pyproject.toml) # pytest-asyncio # pytest-cov # pytest-mock -pytest-asyncio==0.21.0 - # via -r ./python/requirements-tests.in -pytest-cov==4.0.0 - # via -r ./python/requirements-tests.in -pytest-mock==3.10.0 - # via -r ./python/requirements-tests.in +pytest-asyncio==0.23.3 + # via GPTenterprise (python/pyproject.toml) +pytest-cov==4.1.0 + # via GPTenterprise (python/pyproject.toml) +pytest-mock==3.12.0 + # via GPTenterprise (python/pyproject.toml) python-dotenv==1.0.0 - # via -r ./python/requirements.txt -requests==2.28.2 + # via GPTenterprise (python/pyproject.toml) +requests==2.31.0 + # via GPTenterprise (python/pyproject.toml) +sniffio==1.3.0 # via - # -r 
./python/requirements.txt + # anyio + # httpx # openai -tqdm==4.65.0 +tomli==2.0.1 # via - # -r ./python/requirements.txt - # openai -urllib3==1.26.15 + # coverage + # pytest +tqdm==4.66.1 + # via openai +typing-extensions==4.9.0 # via - # -r ./python/requirements.txt - # requests -yarl==1.8.2 - # via - # -r ./python/requirements.txt - # aiohttp + # anyio + # openai + # pydantic + # pydantic-core +urllib3==2.2.1 + # via requests diff --git a/python/requirements.txt b/python/requirements.txt index dc6de94..4ae13d0 100644 --- a/python/requirements.txt +++ b/python/requirements.txt @@ -1,44 +1,59 @@ # -# This file is autogenerated by pip-compile with Python 3.11 +# This file is autogenerated by pip-compile with Python 3.10 # by the following command: # -# pip-compile --output-file=./python/requirements.txt ./python/setup.py +# pip-compile --output-file=./python/requirements.txt ./python/pyproject.toml # -aiohttp==3.8.4 - # via openai -aiosignal==1.3.1 - # via aiohttp -async-timeout==4.0.2 - # via aiohttp -attrs==23.1.0 - # via aiohttp -certifi==2022.12.7 - # via requests -charset-normalizer==3.1.0 - # via - # aiohttp - # requests -frozenlist==1.3.3 +annotated-types==0.6.0 + # via pydantic +anyio==4.2.0 # via - # aiohttp - # aiosignal -idna==3.4 + # httpx + # openai +certifi==2023.11.17 # via + # httpcore + # httpx # requests - # yarl -multidict==6.0.4 +charset-normalizer==3.3.2 + # via requests +distro==1.9.0 + # via openai +exceptiongroup==1.2.0 + # via anyio +h11==0.14.0 + # via httpcore +httpcore==1.0.2 + # via httpx +httpx==0.26.0 + # via openai +idna==3.6 # via - # aiohttp - # yarl -openai==0.27.4 - # via GPTenterprise (./python/setup.py) -python-dotenv==1.0.0 - # via GPTenterprise (./python/setup.py) -requests==2.28.2 + # anyio + # httpx + # requests +openai==1.7.2 + # via GPTenterprise (python/pyproject.toml) +pydantic==2.5.3 # via openai -tqdm==4.65.0 +pydantic-core==2.14.6 + # via pydantic +python-dotenv==1.0.0 + # via GPTenterprise (python/pyproject.toml) 
+requests==2.31.0 + # via GPTenterprise (python/pyproject.toml) +sniffio==1.3.0 + # via + # anyio + # httpx + # openai +tqdm==4.66.1 # via openai -urllib3==1.26.15 +typing-extensions==4.9.0 + # via + # anyio + # openai + # pydantic + # pydantic-core +urllib3==2.2.1 # via requests -yarl==1.8.2 - # via aiohttp diff --git a/python/setup.py b/python/setup.py deleted file mode 100644 index 052815e..0000000 --- a/python/setup.py +++ /dev/null @@ -1,31 +0,0 @@ -""" -Module setup -""" -from setuptools import setup, find_packages - -REQUIREMENTS = [ - "python-dotenv", - "openai", -] - -setup( - name="GPTenterprise", - version="1.0-b1", - description="Emulating an enterprise with OpenaAI GPT.", - author="nervousapps (Achille Pénet)", - author_email="achille.penet@icloud.com", - url="https://github.com/nervousapps/GPTenterprise", - packages=find_packages(), - package_data={ - # If any package contains *.txt or *.rst files, include them: - "": ["*.txt", "./prompts/*/*.txt"], - }, - include_package_data=True, - install_requires=REQUIREMENTS, - entry_points={ - "console_scripts": [ - "GPTenterprise = gpt_enterprise:main", - ], - }, - python_requires=">= 3.9", -) diff --git a/python/tests/conftest.py b/python/tests/conftest.py index 2985c72..7235974 100644 --- a/python/tests/conftest.py +++ b/python/tests/conftest.py @@ -15,10 +15,9 @@ # Change api url to LoacalAi one -openai.api_base = "local_ai_url" +openai.base_url = "local_ai_url" openai.api_key = "sx-xxx" -OPENAI_API_KEY = "sx-xxx" -os.environ['OPENAI_API_KEY'] = OPENAI_API_KEY + @pytest.fixture def scrum_master_test(): diff --git a/python/tests/test_employee.py b/python/tests/test_employee.py index bf44d1d..be0102e 100644 --- a/python/tests/test_employee.py +++ b/python/tests/test_employee.py @@ -1,6 +1,7 @@ """ Test cases for the employee module """ + import os import pytest from gpt_enterprise.employee import Employee diff --git a/python/tests/test_enterprise.py b/python/tests/test_enterprise.py index a37fc2c..e4da141 
100644 --- a/python/tests/test_enterprise.py +++ b/python/tests/test_enterprise.py @@ -1,6 +1,7 @@ """ Test enterprise class """ + import os import ast import asyncio diff --git a/python/tests/test_gpt_utils.py b/python/tests/test_gpt_utils.py index bb0935d..882e6bb 100644 --- a/python/tests/test_gpt_utils.py +++ b/python/tests/test_gpt_utils.py @@ -1,6 +1,7 @@ """ Test gpt utilities """ + import time import pytest from gpt_enterprise.gpt_utils import generate_image, generate_text @@ -18,7 +19,7 @@ def mock_generate_text(model: str, messages, temperature: float) -> str: return mock_open_ai_response_object(mocker=mocker, content="Do something") mocker.patch( - "gpt_enterprise.gpt_utils.openai.ChatCompletion.create", mock_generate_text + "gpt_enterprise.gpt_utils.openai.chat.completions.create", mock_generate_text ) response = generate_text( system_prompt="test", user_prompt="test", model="test", temperature=1.0 @@ -36,7 +37,7 @@ def mock_generate_text(model: str, messages) -> str: return mock_open_ai_response_object(mocker=mocker, content="Do something") mocker.patch( - "gpt_enterprise.gpt_utils.openai.ChatCompletion.create", mock_generate_text + "gpt_enterprise.gpt_utils.openai.chat.completions.create", mock_generate_text ) mocker.patch( "gpt_enterprise.gpt_utils.openai.Image.create", return_value={"data": []} diff --git a/python/tests/test_scrum_master.py b/python/tests/test_scrum_master.py index 5f0a800..48d7a38 100644 --- a/python/tests/test_scrum_master.py +++ b/python/tests/test_scrum_master.py @@ -1,6 +1,7 @@ """ Test manager """ + import os import ast import pytest @@ -64,10 +65,7 @@ def test_do_plan(mocker, scrum_master_test, fake_employees): # TODO: add parametrize with test files @pytest.mark.asyncio -@pytest.mark.parametrize("test_file", [ - "tasks.txt", - "tasks_requirements_list.txt" -]) +@pytest.mark.parametrize("test_file", ["tasks.txt", "tasks_requirements_list.txt"]) async def test_do_plan_async(mocker, scrum_master_test, fake_employees, 
test_file): # Mock function and method that requests openai API (to avoid costs) mocker.patch( diff --git a/python/tests/test_team_leader.py b/python/tests/test_team_leader.py index c54a941..7e239ad 100644 --- a/python/tests/test_team_leader.py +++ b/python/tests/test_team_leader.py @@ -1,6 +1,7 @@ """ Test manager """ + import os import ast import pytest diff --git a/reqs_black_doc.sh b/reqs_black_doc.sh index 8014f8f..3313fd1 100755 --- a/reqs_black_doc.sh +++ b/reqs_black_doc.sh @@ -1,5 +1,5 @@ -pip-compile ./python/setup.py -o ./python/requirements.txt -pip-compile ./python/requirements-tests.in -o ./python/requirements-tests.txt -pip-compile ./python/requirements-docs.in -o ./python/requirements-docs.txt +pip-compile ./python/pyproject.toml -o ./python/requirements.txt +pip-compile --extra test ./python/pyproject.toml -o ./python/requirements-tests.txt +pip-compile --extra doc ./python/pyproject.toml -o ./python/requirements-docs.txt black ./ doctoc README.md \ No newline at end of file