diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml
index d2e4480e5..97272f3e4 100644
--- a/.github/workflows/build_and_test.yml
+++ b/.github/workflows/build_and_test.yml
@@ -23,7 +23,7 @@ jobs:
     strategy:
       matrix:
         os: [ubuntu-latest]
-        python: ["3.10", "3.11"]
+        python: ["3.10", "3.11", "3.12"]
         package_name: ["pyrit"]
     runs-on: ${{ matrix.os }}
     # EnricoMi/publish-unit-test-result-action@v2 requires the following permissions
diff --git a/doc/contributing/installation.md b/doc/contributing/installation.md
index 353c31087..e154c3330 100644
--- a/doc/contributing/installation.md
+++ b/doc/contributing/installation.md
@@ -15,7 +15,7 @@ This is a list of the prerequisites needed to run this library.
 git clone https://github.com/Azure/PyRIT
 ```
 
-Note: PyRIT requires Python version 3.11. If using Conda, you'll set the environment to use this version. If running PyRIT outside of a python environment, make sure you have this version installed.
+Note: PyRIT requires Python version 3.10, 3.11, or 3.12. If using Conda, you'll set the environment to use this version. If running PyRIT outside of a python environment, make sure you have this version installed.
 
 ## Installation
 
diff --git a/doc/setup/install_pyrit.md b/doc/setup/install_pyrit.md
index 9c259f32a..0efa1c389 100644
--- a/doc/setup/install_pyrit.md
+++ b/doc/setup/install_pyrit.md
@@ -1,6 +1,6 @@
 # Install PyRIT
 
-To install PyRIT using pip, make sure you have Python 3.11 installed using `python --version`.
+To install PyRIT using pip, make sure you have Python 3.10, 3.11, or 3.12 installed using `python --version`.
 Alternatively, create a conda environment as follows
 
 ```
diff --git a/pyproject.toml b/pyproject.toml
index 3c7cbc28e..8231a66e0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -30,8 +30,9 @@ classifiers = [
     "License :: OSI Approved :: MIT License",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
+    "Programming Language :: Python :: 3.12",
 ]
-requires-python = ">=3.10, <3.12"
+requires-python = ">=3.10, <3.13"
 dependencies = [
     "aioconsole>=0.7.1",
     "appdirs>=1.4.0",
diff --git a/tests/orchestrator/test_xpia_orchestrator.py b/tests/orchestrator/test_xpia_orchestrator.py
index b96c4e3b8..454c0cbfa 100644
--- a/tests/orchestrator/test_xpia_orchestrator.py
+++ b/tests/orchestrator/test_xpia_orchestrator.py
@@ -34,7 +34,7 @@ def processing_target() -> PromptTarget:
 @pytest.fixture
 def success_scorer() -> Scorer:
     mock_score = MagicMock(Score)
-    mock_score.score_value = True
+    mock_score.score_value = "True"
     mock_score.score_type = "true_false"
     mock_score.get_value.return_value = True
 
@@ -81,8 +81,8 @@ async def processing_callback():
         processing_callback=processing_callback,
     )
     score = await xpia_orchestrator.execute_async()
-    assert score.score_value
-    assert success_scorer.score_text_async.called_once
+    assert score.get_value()
+    success_scorer.score_text_async.assert_called_once()
 
 
 @pytest.mark.asyncio
@@ -96,36 +96,36 @@ async def test_xpia_manual_processing_orchestrator_execute(mock_input_async, att
     score = await xpia_orchestrator.execute_async()
-    assert score.score_value
-    assert success_scorer.score_text_async.called_once
+    assert score.get_value()
+    success_scorer.score_text_async.assert_called_once()
     mock_input_async.assert_awaited_once()
 
 
 @pytest.mark.asyncio
 async def test_xpia_test_orchestrator_execute(attack_setup_target, processing_target, success_scorer):
-    mock_send_to_processing_target = AsyncMock()
-    mock_send_to_processing_target.return_value = AsyncMock(
-        request_pieces=[AsyncMock(converted_value="mocked_processing_response")]
-    )
-
-    with patch.object(
-        XPIATestOrchestrator, "_process_async", new_callable=AsyncMock, return_value="mocked_processing_response"
-    ) as mock_process_async:
-        xpia_orchestrator = XPIATestOrchestrator(
-            attack_content="test",
-            processing_prompt="some instructions and the required ",
-            processing_target=processing_target,
-            attack_setup_target=attack_setup_target,
-            scorer=success_scorer,
+    with patch.object(processing_target, "send_prompt_async", new_callable=AsyncMock) as mock_send_to_processing_target:
+        mock_send_to_processing_target.return_value = AsyncMock(
+            request_pieces=[AsyncMock(converted_value="mocked_processing_response")]
         )
-        score = await xpia_orchestrator.execute_async()
+        with patch.object(
+            XPIATestOrchestrator, "_process_async", new_callable=AsyncMock, return_value="mocked_processing_response"
+        ) as mock_process_async:
+            xpia_orchestrator = XPIATestOrchestrator(
+                attack_content="test",
+                processing_prompt="some instructions and the required ",
+                processing_target=processing_target,
+                attack_setup_target=attack_setup_target,
+                scorer=success_scorer,
+            )
+
+            score = await xpia_orchestrator.execute_async()
 
-    assert score is not None
-    assert score.score_value
-    assert success_scorer.score_text_async.called_once
-    assert mock_send_to_processing_target.called_once
-    assert mock_process_async.called_once
+            assert score is not None
+            assert score.get_value()
+            success_scorer.score_text_async.assert_called_once()
+            mock_send_to_processing_target.assert_not_called()
+            mock_process_async.assert_called_once()
 
 
 @pytest.mark.asyncio
@@ -146,7 +146,7 @@ async def test_xpia_orchestrator_process_async(attack_setup_target, processing_t
         scorer=success_scorer,
     )
     score = await xpia_orchestrator.execute_async()
-    assert score.score_value
-    assert success_scorer.score_text_async.called_once()
-    assert mock_send_to_processing_target.called_once()
+    assert score.get_value()
+    success_scorer.score_text_async.assert_called_once()
+    mock_send_to_processing_target.assert_not_called()
     mock_process_async.assert_awaited_once()
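
Note on the test changes above: `unittest.mock` auto-creates attributes on access, so the old `assert some_mock.called_once` pattern passes unconditionally (it only asserts that a freshly created, truthy child mock exists) and never checks the call count. The diff therefore switches to the real assertion helpers (`assert_called_once()`, `assert_not_called()`, `assert_awaited_once()`), and the score checks move from the raw `score_value` string (always truthy when non-empty) to `score.get_value()`. A minimal standalone sketch of the pitfall, not part of the patch:

```python
from unittest.mock import MagicMock

mock = MagicMock()

# `called_once` is not a real Mock attribute; attribute access returns a new,
# truthy child mock, so this assertion passes even though `mock` was never called.
assert mock.called_once

# The genuine assertion helper raises AssertionError when the call count is wrong.
try:
    mock.assert_called_once()
except AssertionError as err:
    print(f"caught expected failure: {err}")
```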