diff --git a/.gitattributes b/.gitattributes
index f18b519846cb..c025026c544d 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -3,3 +3,6 @@
 frontend/build/** linguist-generated
 **/poetry.lock linguist-generated
 docs/_javascript/** linguist-vendored
+
+# Exclude VCR cassettes from stats
+forge/tests/vcr_cassettes/**/**.y*ml linguist-generated
diff --git a/.github/workflows/autogpt-ci.yml b/.github/workflows/autogpt-ci.yml
index 5a7d032ae45a..8b92435eba9f 100644
--- a/.github/workflows/autogpt-ci.yml
+++ b/.github/workflows/autogpt-ci.yml
@@ -6,13 +6,11 @@ on:
     paths:
       - '.github/workflows/autogpt-ci.yml'
       - 'autogpt/**'
-      - '!autogpt/tests/vcr_cassettes'
   pull_request:
     branches: [ master, development, release-* ]
     paths:
       - '.github/workflows/autogpt-ci.yml'
      - 'autogpt/**'
-      - '!autogpt/tests/vcr_cassettes'
 
 concurrency:
   group: ${{ format('autogpt-ci-{0}', github.head_ref && format('{0}-{1}', github.event_name, github.event.pull_request.number) || github.sha) }}
@@ -73,37 +71,6 @@ jobs:
           git config --global user.name "Auto-GPT-Bot"
           git config --global user.email "github-bot@agpt.co"
 
-      - name: Checkout cassettes
-        if: ${{ startsWith(github.event_name, 'pull_request') }}
-        env:
-          PR_BASE: ${{ github.event.pull_request.base.ref }}
-          PR_BRANCH: ${{ github.event.pull_request.head.ref }}
-          PR_AUTHOR: ${{ github.event.pull_request.user.login }}
-        run: |
-          cassette_branch="${PR_AUTHOR}-${PR_BRANCH}"
-          cassette_base_branch="${PR_BASE}"
-          cd tests/vcr_cassettes
-
-          if ! git ls-remote --exit-code --heads origin $cassette_base_branch ; then
-            cassette_base_branch="master"
-          fi
-
-          if git ls-remote --exit-code --heads origin $cassette_branch ; then
-            git fetch origin $cassette_branch
-            git fetch origin $cassette_base_branch
-
-            git checkout $cassette_branch
-
-            # Pick non-conflicting cassette updates from the base branch
-            git merge --no-commit --strategy-option=ours origin/$cassette_base_branch
-            echo "Using cassettes from mirror branch '$cassette_branch'," \
-              "synced to upstream branch '$cassette_base_branch'."
-          else
-            git checkout -b $cassette_branch
-            echo "Branch '$cassette_branch' does not exist in cassette submodule." \
-              "Using cassettes from '$cassette_base_branch'."
-          fi
-
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v5
         with:
@@ -163,80 +130,6 @@ jobs:
           token: ${{ secrets.CODECOV_TOKEN }}
           flags: autogpt-agent,${{ runner.os }}
 
-      - id: setup_git_auth
-        name: Set up git token authentication
-        # Cassettes may be pushed even when tests fail
-        if: success() || failure()
-        run: |
-          config_key="http.${{ github.server_url }}/.extraheader"
-          if [ "${{ runner.os }}" = 'macOS' ]; then
-            base64_pat=$(echo -n "pat:${{ secrets.PAT_REVIEW }}" | base64)
-          else
-            base64_pat=$(echo -n "pat:${{ secrets.PAT_REVIEW }}" | base64 -w0)
-          fi
-
-          git config "$config_key" \
-            "Authorization: Basic $base64_pat"
-
-          cd tests/vcr_cassettes
-          git config "$config_key" \
-            "Authorization: Basic $base64_pat"
-
-          echo "config_key=$config_key" >> $GITHUB_OUTPUT
-
-      - id: push_cassettes
-        name: Push updated cassettes
-        # For pull requests, push updated cassettes even when tests fail
-        if: github.event_name == 'push' || (! github.event.pull_request.head.repo.fork && (success() || failure()))
-        env:
-          PR_BRANCH: ${{ github.event.pull_request.head.ref }}
-          PR_AUTHOR: ${{ github.event.pull_request.user.login }}
-        run: |
-          if [ "${{ startsWith(github.event_name, 'pull_request') }}" = "true" ]; then
-            is_pull_request=true
-            cassette_branch="${PR_AUTHOR}-${PR_BRANCH}"
-          else
-            cassette_branch="${{ github.ref_name }}"
-          fi
-
-          cd tests/vcr_cassettes
-          # Commit & push changes to cassettes if any
-          if ! git diff --quiet; then
-            git add .
-            git commit -m "Auto-update cassettes"
-            git push origin HEAD:$cassette_branch
-            if [ ! $is_pull_request ]; then
-              cd ../..
-              git add tests/vcr_cassettes
-              git commit -m "Update cassette submodule"
-              git push origin HEAD:$cassette_branch
-            fi
-            echo "updated=true" >> $GITHUB_OUTPUT
-          else
-            echo "updated=false" >> $GITHUB_OUTPUT
-            echo "No cassette changes to commit"
-          fi
-
-      - name: Post Set up git token auth
-        if: steps.setup_git_auth.outcome == 'success'
-        run: |
-          git config --unset-all '${{ steps.setup_git_auth.outputs.config_key }}'
-          git submodule foreach git config --unset-all '${{ steps.setup_git_auth.outputs.config_key }}'
-
-      - name: Apply "behaviour change" label and comment on PR
-        if: ${{ startsWith(github.event_name, 'pull_request') }}
-        run: |
-          PR_NUMBER="${{ github.event.pull_request.number }}"
-          TOKEN="${{ secrets.PAT_REVIEW }}"
-          REPO="${{ github.repository }}"
-
-          if [[ "${{ steps.push_cassettes.outputs.updated }}" == "true" ]]; then
-            echo "Adding label and comment..."
-            echo $TOKEN | gh auth login --with-token
-            gh issue edit $PR_NUMBER --add-label "behaviour change"
-            gh issue comment $PR_NUMBER --body "You changed AutoGPT's behaviour on ${{ runner.os }}. The cassettes have been updated and will be merged to the submodule when this Pull Request gets merged."
-          fi
-
       - name: Upload logs to artifact
         if: always()
         uses: actions/upload-artifact@v4
diff --git a/.github/workflows/autogpt-docker-ci.yml b/.github/workflows/autogpt-docker-ci.yml
index 7620eed638c8..3bbcfe8e779d 100644
--- a/.github/workflows/autogpt-docker-ci.yml
+++ b/.github/workflows/autogpt-docker-ci.yml
@@ -6,13 +6,11 @@ on:
     paths:
       - '.github/workflows/autogpt-docker-ci.yml'
       - 'autogpt/**'
-      - '!autogpt/tests/vcr_cassettes'
   pull_request:
     branches: [ master, development, release-* ]
     paths:
       - '.github/workflows/autogpt-docker-ci.yml'
       - 'autogpt/**'
-      - '!autogpt/tests/vcr_cassettes'
 
 concurrency:
   group: ${{ format('autogpt-docker-ci-{0}', github.head_ref && format('pr-{0}', github.event.pull_request.number) || github.sha) }}
diff --git a/.github/workflows/autogpt-server-ci.yml b/.github/workflows/autogpt-server-ci.yml
index 6fb1d9c23984..a1410d335831 100644
--- a/.github/workflows/autogpt-server-ci.yml
+++ b/.github/workflows/autogpt-server-ci.yml
@@ -6,13 +6,11 @@ on:
     paths:
       - ".github/workflows/autogpt-server-ci.yml"
      - "rnd/autogpt_server/**"
-      - "!autogpt/tests/vcr_cassettes"
   pull_request:
     branches: [master, development, release-*]
     paths:
       - ".github/workflows/autogpt-server-ci.yml"
       - "rnd/autogpt_server/**"
-      - "!autogpt/tests/vcr_cassettes"
 
 concurrency:
   group: ${{ format('autogpt-server-ci-{0}', github.head_ref && format('{0}-{1}', github.event_name, github.event.pull_request.number) || github.sha) }}
diff --git a/.github/workflows/forge-ci.yml b/.github/workflows/forge-ci.yml
index d996bdb2f892..a37173b6f3df 100644
--- a/.github/workflows/forge-ci.yml
+++ b/.github/workflows/forge-ci.yml
@@ -6,11 +6,13 @@ on:
     paths:
       - '.github/workflows/forge-ci.yml'
       - 'forge/**'
+      - '!forge/tests/vcr_cassettes'
   pull_request:
     branches: [ master, development, release-* ]
     paths:
       - '.github/workflows/forge-ci.yml'
       - 'forge/**'
+      - '!forge/tests/vcr_cassettes'
 
 concurrency:
   group: ${{ format('forge-ci-{0}', github.head_ref && format('{0}-{1}', github.event_name, github.event.pull_request.number) || github.sha) }}
@@ -66,6 +68,37 @@ jobs:
           fetch-depth: 0
           submodules: true
 
+      - name: Checkout cassettes
+        if: ${{ startsWith(github.event_name, 'pull_request') }}
+        env:
+          PR_BASE: ${{ github.event.pull_request.base.ref }}
+          PR_BRANCH: ${{ github.event.pull_request.head.ref }}
+          PR_AUTHOR: ${{ github.event.pull_request.user.login }}
+        run: |
+          cassette_branch="${PR_AUTHOR}-${PR_BRANCH}"
+          cassette_base_branch="${PR_BASE}"
+          cd tests/vcr_cassettes
+
+          if ! git ls-remote --exit-code --heads origin $cassette_base_branch ; then
+            cassette_base_branch="master"
+          fi
+
+          if git ls-remote --exit-code --heads origin $cassette_branch ; then
+            git fetch origin $cassette_branch
+            git fetch origin $cassette_base_branch
+
+            git checkout $cassette_branch
+
+            # Pick non-conflicting cassette updates from the base branch
+            git merge --no-commit --strategy-option=ours origin/$cassette_base_branch
+            echo "Using cassettes from mirror branch '$cassette_branch'," \
+              "synced to upstream branch '$cassette_base_branch'."
+          else
+            git checkout -b $cassette_branch
+            echo "Branch '$cassette_branch' does not exist in cassette submodule." \
+              "Using cassettes from '$cassette_base_branch'."
+          fi
+
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v5
         with:
@@ -121,6 +154,80 @@ jobs:
           token: ${{ secrets.CODECOV_TOKEN }}
           flags: forge,${{ runner.os }}
 
+      - id: setup_git_auth
+        name: Set up git token authentication
+        # Cassettes may be pushed even when tests fail
+        if: success() || failure()
+        run: |
+          config_key="http.${{ github.server_url }}/.extraheader"
+          if [ "${{ runner.os }}" = 'macOS' ]; then
+            base64_pat=$(echo -n "pat:${{ secrets.PAT_REVIEW }}" | base64)
+          else
+            base64_pat=$(echo -n "pat:${{ secrets.PAT_REVIEW }}" | base64 -w0)
+          fi
+
+          git config "$config_key" \
+            "Authorization: Basic $base64_pat"
+
+          cd tests/vcr_cassettes
+          git config "$config_key" \
+            "Authorization: Basic $base64_pat"
+
+          echo "config_key=$config_key" >> $GITHUB_OUTPUT
+
+      - id: push_cassettes
+        name: Push updated cassettes
+        # For pull requests, push updated cassettes even when tests fail
+        if: github.event_name == 'push' || (! github.event.pull_request.head.repo.fork && (success() || failure()))
+        env:
+          PR_BRANCH: ${{ github.event.pull_request.head.ref }}
+          PR_AUTHOR: ${{ github.event.pull_request.user.login }}
+        run: |
+          if [ "${{ startsWith(github.event_name, 'pull_request') }}" = "true" ]; then
+            is_pull_request=true
+            cassette_branch="${PR_AUTHOR}-${PR_BRANCH}"
+          else
+            cassette_branch="${{ github.ref_name }}"
+          fi
+
+          cd tests/vcr_cassettes
+          # Commit & push changes to cassettes if any
+          if ! git diff --quiet; then
+            git add .
+            git commit -m "Auto-update cassettes"
+            git push origin HEAD:$cassette_branch
+            if [ ! $is_pull_request ]; then
+              cd ../..
+              git add tests/vcr_cassettes
+              git commit -m "Update cassette submodule"
+              git push origin HEAD:$cassette_branch
+            fi
+            echo "updated=true" >> $GITHUB_OUTPUT
+          else
+            echo "updated=false" >> $GITHUB_OUTPUT
+            echo "No cassette changes to commit"
+          fi
+
+      - name: Post Set up git token auth
+        if: steps.setup_git_auth.outcome == 'success'
+        run: |
+          git config --unset-all '${{ steps.setup_git_auth.outputs.config_key }}'
+          git submodule foreach git config --unset-all '${{ steps.setup_git_auth.outputs.config_key }}'
+
+      - name: Apply "behaviour change" label and comment on PR
+        if: ${{ startsWith(github.event_name, 'pull_request') }}
+        run: |
+          PR_NUMBER="${{ github.event.pull_request.number }}"
+          TOKEN="${{ secrets.PAT_REVIEW }}"
+          REPO="${{ github.repository }}"
+
+          if [[ "${{ steps.push_cassettes.outputs.updated }}" == "true" ]]; then
+            echo "Adding label and comment..."
+            echo $TOKEN | gh auth login --with-token
+            gh issue edit $PR_NUMBER --add-label "behaviour change"
+            gh issue comment $PR_NUMBER --body "You changed AutoGPT's behaviour on ${{ runner.os }}. The cassettes have been updated and will be merged to the submodule when this Pull Request gets merged."
+          fi
+
       - name: Upload logs to artifact
         if: always()
         uses: actions/upload-artifact@v4
diff --git a/.github/workflows/pr-label.yml b/.github/workflows/pr-label.yml
index 15b4e73a9bb7..a719bfd8d32c 100644
--- a/.github/workflows/pr-label.yml
+++ b/.github/workflows/pr-label.yml
@@ -5,7 +5,7 @@ on:
   push:
     branches: [ master, development, release-* ]
     paths-ignore:
-      - 'autogpt/tests/vcr_cassettes'
+      - 'forge/tests/vcr_cassettes'
       - 'benchmark/reports/**'
   # So that the `dirtyLabel` is removed if conflicts are resolve
   # We recommend `pull_request_target` so that github secrets are available.
diff --git a/.github/workflows/python-checks.yml b/.github/workflows/python-checks.yml
index 17d49f283b10..f9042269b92b 100644
--- a/.github/workflows/python-checks.yml
+++ b/.github/workflows/python-checks.yml
@@ -9,7 +9,7 @@ on:
       - 'forge/**'
       - 'benchmark/**'
       - '**.py'
-      - '!autogpt/tests/vcr_cassettes'
+      - '!forge/tests/vcr_cassettes'
   pull_request:
     branches: [ master, development, release-* ]
     paths:
@@ -18,7 +18,7 @@ on:
       - 'forge/**'
       - 'benchmark/**'
       - '**.py'
-      - '!autogpt/tests/vcr_cassettes'
+      - '!forge/tests/vcr_cassettes'
 
 concurrency:
   group: ${{ format('lint-ci-{0}', github.head_ref && format('{0}-{1}', github.event_name, github.event.pull_request.number) || github.sha) }}
diff --git a/.gitmodules b/.gitmodules
index 0258bcd361a4..aea59a8452cf 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,3 +1,3 @@
-[submodule "autogpt/tests/vcr_cassettes"]
-    path = autogpt/tests/vcr_cassettes
+[submodule "forge/tests/vcr_cassettes"]
+    path = forge/tests/vcr_cassettes
     url = https://github.com/Significant-Gravitas/Auto-GPT-test-cassettes
diff --git a/autogpt/.gitattributes b/autogpt/.gitattributes
deleted file mode 100644
index 60fb560da4aa..000000000000
--- a/autogpt/.gitattributes
+++ /dev/null
@@ -1,5 +0,0 @@
-# Exclude VCR cassettes from stats
-tests/vcr_cassettes/**/**.y*ml linguist-generated
-
-# Mark documentation as such
-docs/**.md linguist-documentation
diff --git a/autogpt/poetry.lock b/autogpt/poetry.lock
index b5c13400325e..3b0e9da62d5f 100644
--- a/autogpt/poetry.lock
+++ b/autogpt/poetry.lock
@@ -6065,20 +6065,6 @@ dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2
 doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"]
 test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
 
-[[package]]
-name = "types-beautifulsoup4"
-version = "4.12.0.20240106"
-description = "Typing stubs for beautifulsoup4"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "types-beautifulsoup4-4.12.0.20240106.tar.gz", hash = "sha256:98d628985b71b140bd3bc22a8cb0ab603c2f2d08f20d37925965eb4a21739be8"},
-    {file = "types_beautifulsoup4-4.12.0.20240106-py3-none-any.whl", hash = "sha256:cbdd60ab8aeac737ac014431b6e921b43e84279c0405fdd25a6900bb0e71da5b"},
-]
-
-[package.dependencies]
-types-html5lib = "*"
-
 [[package]]
 name = "types-colorama"
 version = "0.4.15.20240106"
 description = "Typing stubs for colorama"
@@ -6090,28 +6076,6 @@ files = [
     {file = "types_colorama-0.4.15.20240106-py3-none-any.whl", hash = "sha256:18294bc18f60dc0b4895de8119964a5d895f5e180c2d1308fdd33009c0fa0f38"},
 ]
 
-[[package]]
-name = "types-html5lib"
-version = "1.1.11.20240106"
-description = "Typing stubs for html5lib"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "types-html5lib-1.1.11.20240106.tar.gz", hash = "sha256:fc3a1b18eb601b3eeaf92c900bd67675c0a4fa1dd1d2a2893ebdb46923547ee9"},
-    {file = "types_html5lib-1.1.11.20240106-py3-none-any.whl", hash = "sha256:61993cb89220107481e0f1da65c388ff8cf3d8c5f6e8483c97559639a596b697"},
-]
-
-[[package]]
-name = "types-pillow"
-version = "10.2.0.20240111"
-description = "Typing stubs for Pillow"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "types-Pillow-10.2.0.20240111.tar.gz", hash = "sha256:e8d359bfdc5a149a3c90a7e153cb2d0750ddf7fc3508a20dfadabd8a9435e354"},
-    {file = "types_Pillow-10.2.0.20240111-py3-none-any.whl", hash = "sha256:1f4243b30c143b56b0646626f052e4269123e550f9096cdfb5fbd999daee7dbb"},
-]
-
 [[package]]
 name = "typing-extensions"
 version = "4.9.0"
@@ -6793,4 +6757,4 @@ benchmark = ["agbenchmark"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "38a549db0d7726a14e3f990206928b6187e009aee31755b7286ede1c568d359b"
+content-hash = "b3d4efee5861b32152024dada1ec61f4241122419cb538012c00a6ed55ac8a4b"
diff --git a/autogpt/pyproject.toml b/autogpt/pyproject.toml
index d51e22ad9003..99cfa79f22dc 100644
--- a/autogpt/pyproject.toml
+++ b/autogpt/pyproject.toml
@@ -22,21 +22,16 @@ serve = "autogpt.app.cli:serve"
 python = "^3.10"
 autogpt-forge = { path = "../forge", develop = true }
 # autogpt-forge = {git = "https://github.com/Significant-Gravitas/AutoGPT.git", subdirectory = "forge"}
-beautifulsoup4 = "^4.12.2"
 click = "*"
 colorama = "^0.4.6"
 distro = "^1.8.0"
 fastapi = "^0.109.1"
 gitpython = "^3.1.32"
-google-api-python-client = "*"
 hypercorn = "^0.14.4"
 openai = "^1.7.2"
 orjson = "^3.8.10"
-Pillow = "*"
 pydantic = "^2.7.2"
-python-docx = "*"
 python-dotenv = "^1.0.0"
-pyyaml = "^6.0"
 requests = "*"
 sentry-sdk = "^1.40.4"
 
@@ -55,9 +50,7 @@ pre-commit = "*"
 pyright = "^1.1.364"
 
 # Type stubs
-types-beautifulsoup4 = "*"
 types-colorama = "*"
-types-Pillow = "*"
 
 # Testing
 pytest = "*"
@@ -66,7 +59,6 @@ pytest-cov = "*"
 pytest-mock = "*"
 pytest-recording = "*"
 pytest-xdist = "*"
-vcrpy = { git = "https://github.com/Significant-Gravitas/vcrpy.git", rev = "master" }
 
 [tool.poetry.group.build]
 optional = true
@@ -97,7 +89,3 @@ skip_glob = ["data"]
 pythonVersion = "3.10"
 exclude = ["data/**", "**/node_modules", "**/__pycache__", "**/.*"]
 ignore = ["../forge/**"]
-
-
-[tool.pytest.ini_options]
-markers = ["slow", "requires_openai_api_key", "requires_huggingface_api_key"]
diff --git a/autogpt/tests/conftest.py b/autogpt/tests/conftest.py
index bc7630713a61..1d9f1d31246f 100644
--- a/autogpt/tests/conftest.py
+++ b/autogpt/tests/conftest.py
@@ -20,7 +20,6 @@
 
 pytest_plugins = [
     "tests.integration.agent_factory",
-    "tests.vcr",
 ]
 
 
diff --git a/forge/conftest.py b/forge/conftest.py
new file mode 100644
index 000000000000..8b8251b4c644
--- /dev/null
+++ b/forge/conftest.py
@@ -0,0 +1,41 @@
+import uuid
+from pathlib import Path
+
+import pytest
+
+from forge.file_storage.base import FileStorage, FileStorageConfiguration
+from forge.file_storage.local import LocalFileStorage
+
+pytest_plugins = [
+    "tests.vcr",
+]
+
+
+@pytest.fixture(scope="session", autouse=True)
+def load_env_vars():
+    from dotenv import load_dotenv
+
+    load_dotenv()
+
+
+@pytest.fixture()
+def tmp_project_root(tmp_path: Path) -> Path:
+    return tmp_path
+
+
+@pytest.fixture()
+def app_data_dir(tmp_project_root: Path) -> Path:
+    dir = tmp_project_root / "data"
+    dir.mkdir(parents=True, exist_ok=True)
+    return dir
+
+
+@pytest.fixture()
+def storage(app_data_dir: Path) -> FileStorage:
+    storage = LocalFileStorage(
+        FileStorageConfiguration(
+            root=Path(f"{app_data_dir}/{str(uuid.uuid4())}"), restrict_to_root=False
+        )
+    )
+    storage.initialize()
+    return storage
diff --git a/forge/forge/agent_protocol/agent_test.py b/forge/forge/agent_protocol/agent_test.py
index 1d7196359156..4460b97aede3 100644
--- a/forge/forge/agent_protocol/agent_test.py
+++ b/forge/forge/agent_protocol/agent_test.py
@@ -12,9 +12,9 @@
 
 
 @pytest.fixture
-def agent(test_workspace: Path):
+def agent(tmp_project_root: Path):
     db = AgentDB("sqlite:///test.db")
-    config = FileStorageConfiguration(root=test_workspace)
+    config = FileStorageConfiguration(root=tmp_project_root)
     workspace = LocalFileStorage(config)
     return ProtocolAgent(db, workspace)
 
diff --git a/forge/forge/components/__init__.py b/forge/forge/components/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/forge/forge/components/code_executor/__init__.py b/forge/forge/components/code_executor/__init__.py
index 0ffb090dc5a4..52607f9c34f6 100644
--- a/forge/forge/components/code_executor/__init__.py
+++ b/forge/forge/components/code_executor/__init__.py
@@ -1,8 +1,6 @@
 from .code_executor import CodeExecutionError, CodeExecutorComponent
 
 __all__ = [
-    "ALLOWLIST_CONTROL",
-    "DENYLIST_CONTROL",
     "CodeExecutionError",
     "CodeExecutorComponent",
 ]
diff --git a/autogpt/tests/integration/test_execute_code.py b/forge/forge/components/code_executor/test_code_executor.py
similarity index 87%
rename from autogpt/tests/integration/test_execute_code.py
rename to forge/forge/components/code_executor/test_code_executor.py
index bf2bb236e8e1..e264d7abca71 100644
--- a/autogpt/tests/integration/test_execute_code.py
+++ b/forge/forge/components/code_executor/test_code_executor.py
@@ -4,13 +4,15 @@
 from pathlib import Path
 
 import pytest
-from forge.components.code_executor.code_executor import (
+
+from forge.file_storage.base import FileStorage
+from forge.utils.exceptions import InvalidArgumentError, OperationNotAllowedError
+
+from .code_executor import (
     CodeExecutorComponent,
     is_docker_available,
     we_are_running_in_a_docker_container,
 )
-from forge.file_storage.base import FileStorage
-from forge.utils.exceptions import InvalidArgumentError, OperationNotAllowedError
 
 
 @pytest.fixture
@@ -103,6 +105,22 @@ def test_execute_python_file_not_found(code_executor_component: CodeExecutorComp
         code_executor_component.execute_python_file(Path("notexist.py"))
 
 
+def test_execute_shell(
+    code_executor_component: CodeExecutorComponent, random_string: str
+):
+    code_executor_component.config.shell_command_control = "allowlist"
+    code_executor_component.config.shell_allowlist = ["echo"]
+    result = code_executor_component.execute_shell(f"echo 'Hello {random_string}!'")
+    assert f"Hello {random_string}!" in result
+
+
+def test_execute_shell_local_commands_not_allowed(
+    code_executor_component: CodeExecutorComponent, random_string: str
+):
+    with pytest.raises(OperationNotAllowedError, match="not allowed"):
+        code_executor_component.execute_shell(f"echo 'Hello {random_string}!'")
+
+
 def test_execute_shell_denylist_should_deny(
     code_executor_component: CodeExecutorComponent, random_string: str
 ):
diff --git a/autogpt/tests/unit/test_file_operations.py b/forge/forge/components/file_manager/test_file_manager.py
similarity index 62%
rename from autogpt/tests/unit/test_file_operations.py
rename to forge/forge/components/file_manager/test_file_manager.py
index 5ce06ca52f71..a6e45e1ee565 100644
--- a/autogpt/tests/unit/test_file_operations.py
+++ b/forge/forge/components/file_manager/test_file_manager.py
@@ -2,9 +2,11 @@
 from pathlib import Path
 
 import pytest
+
+from forge.agent.base import BaseAgentSettings
 from forge.file_storage import FileStorage
 
-from autogpt.agents.agent import Agent
+from . import FileManagerComponent
 
 
 @pytest.fixture()
@@ -13,8 +15,13 @@ def file_content():
 
 
 @pytest.fixture
-def file_manager_component(agent: Agent):
-    return agent.file_manager
+def file_manager_component(storage: FileStorage):
+    return FileManagerComponent(
+        storage,
+        BaseAgentSettings(
+            agent_id="TestAgent", name="TestAgent", description="Test Agent description"
+        ),
+    )
 
 
 @pytest.fixture()
@@ -41,15 +48,14 @@ def test_nested_file(storage: FileStorage):
 async def test_read_file(
     test_file_path: Path,
     file_content,
-    file_manager_component,
-    agent: Agent,
+    file_manager_component: FileManagerComponent,
 ):
-    await agent.file_manager.workspace.write_file(test_file_path.name, file_content)
+    await file_manager_component.workspace.write_file(test_file_path.name, file_content)
     content = file_manager_component.read_file(test_file_path.name)
     assert content.replace("\r", "") == file_content
 
 
-def test_read_file_not_found(file_manager_component):
+def test_read_file_not_found(file_manager_component: FileManagerComponent):
     filename = "does_not_exist.txt"
     with pytest.raises(FileNotFoundError):
         file_manager_component.read_file(filename)
@@ -57,12 +63,12 @@ def test_read_file_not_found(file_manager_component):
 
 @pytest.mark.asyncio
 async def test_write_to_file_relative_path(
-    test_file_name: Path, file_manager_component, agent: Agent
+    test_file_name: Path, file_manager_component: FileManagerComponent
 ):
     new_content = "This is new content.\n"
     await file_manager_component.write_to_file(test_file_name, new_content)
     with open(
-        agent.file_manager.workspace.get_path(test_file_name), "r", encoding="utf-8"
+        file_manager_component.workspace.get_path(test_file_name), "r", encoding="utf-8"
     ) as f:
         content = f.read()
         assert content == new_content
@@ -70,7 +76,7 @@ async def test_write_to_file_relative_path(
 
 @pytest.mark.asyncio
 async def test_write_to_file_absolute_path(
-    test_file_path: Path, file_manager_component
+    test_file_path: Path, file_manager_component: FileManagerComponent
 ):
     new_content = "This is new content.\n"
     await file_manager_component.write_to_file(test_file_path, new_content)
@@ -80,18 +86,18 @@ async def test_write_to_file_absolute_path(
 
 
 @pytest.mark.asyncio
-async def test_list_files(file_manager_component, agent: Agent):
+async def test_list_files(file_manager_component: FileManagerComponent):
     # Create files A and B
     file_a_name = "file_a.txt"
     file_b_name = "file_b.txt"
     test_directory = Path("test_directory")
-    await agent.file_manager.workspace.write_file(file_a_name, "This is file A.")
-    await agent.file_manager.workspace.write_file(file_b_name, "This is file B.")
+    await file_manager_component.workspace.write_file(file_a_name, "This is file A.")
+    await file_manager_component.workspace.write_file(file_b_name, "This is file B.")
 
     # Create a subdirectory and place a copy of file_a in it
-    agent.file_manager.workspace.make_dir(test_directory)
-    await agent.file_manager.workspace.write_file(
+    file_manager_component.workspace.make_dir(test_directory)
+    await file_manager_component.workspace.write_file(
         test_directory / file_a_name, "This is file A in the subdirectory."
     )
 
@@ -101,10 +107,10 @@ async def test_list_files(file_manager_component, agent: Agent):
     assert os.path.join(test_directory, file_a_name) in files
 
     # Clean up
-    agent.file_manager.workspace.delete_file(file_a_name)
-    agent.file_manager.workspace.delete_file(file_b_name)
-    agent.file_manager.workspace.delete_file(test_directory / file_a_name)
-    agent.file_manager.workspace.delete_dir(test_directory)
+    file_manager_component.workspace.delete_file(file_a_name)
+    file_manager_component.workspace.delete_file(file_b_name)
+    file_manager_component.workspace.delete_file(test_directory / file_a_name)
+    file_manager_component.workspace.delete_dir(test_directory)
 
     # Case 2: Search for a file that does not exist and make sure we don't throw
     non_existent_file = "non_existent_file.txt"
diff --git a/autogpt/tests/unit/test_git_commands.py b/forge/forge/components/git_operations/test_git_operations.py
similarity index 87%
rename from autogpt/tests/unit/test_git_commands.py
rename to forge/forge/components/git_operations/test_git_operations.py
index bb736a2a0c8b..8d6cb4f86dab 100644
--- a/autogpt/tests/unit/test_git_commands.py
+++ b/forge/forge/components/git_operations/test_git_operations.py
@@ -1,11 +1,11 @@
 import pytest
-from forge.components.git_operations import GitOperationsComponent
-from forge.file_storage.base import FileStorage
-from forge.utils.exceptions import CommandExecutionError
 from git.exc import GitCommandError
 from git.repo.base import Repo
 
-from autogpt.agents.agent import Agent
+from forge.file_storage.base import FileStorage
+from forge.utils.exceptions import CommandExecutionError
+
+from . import GitOperationsComponent
 
 
 @pytest.fixture
@@ -14,15 +14,14 @@ def mock_clone_from(mocker):
 
 
 @pytest.fixture
-def git_ops_component(agent: Agent):
-    return agent.git_ops
+def git_ops_component():
+    return GitOperationsComponent()
 
 
 def test_clone_auto_gpt_repository(
     git_ops_component: GitOperationsComponent,
     storage: FileStorage,
     mock_clone_from,
-    agent: Agent,
 ):
     mock_clone_from.return_value = None
 
@@ -46,7 +45,6 @@ def test_clone_repository_error(
     git_ops_component: GitOperationsComponent,
     storage: FileStorage,
     mock_clone_from,
-    agent: Agent,
 ):
     url = "https://github.com/this-repository/does-not-exist.git"
     clone_path = storage.get_path("does-not-exist")
diff --git a/autogpt/tests/integration/test_image_gen.py b/forge/forge/components/image_gen/test_image_gen.py
similarity index 97%
rename from autogpt/tests/integration/test_image_gen.py
rename to forge/forge/components/image_gen/test_image_gen.py
index 792810ad77d0..9fa7bc308c28 100644
--- a/autogpt/tests/integration/test_image_gen.py
+++ b/forge/forge/components/image_gen/test_image_gen.py
@@ -4,17 +4,22 @@
 from unittest.mock import patch
 
 import pytest
+from PIL import Image
+from pydantic import SecretStr, ValidationError
+
 from forge.components.image_gen import ImageGeneratorComponent
 from forge.components.image_gen.image_gen import ImageGeneratorConfiguration
 from forge.file_storage.base import FileStorage
 from forge.llm.providers.openai import OpenAICredentials
-from PIL import Image
-from pydantic import SecretStr
 
 
 @pytest.fixture
 def image_gen_component(storage: FileStorage):
-    cred = OpenAICredentials.from_env()
+    try:
+        cred = OpenAICredentials.from_env()
+    except ValidationError:
+        cred = OpenAICredentials(api_key=SecretStr("test"))
+
     return ImageGeneratorComponent(storage, openai_credentials=cred)
 
 
@@ -34,7 +39,6 @@ def image_size(request):
     return request.param
 
 
-@pytest.mark.requires_openai_api_key
 @pytest.mark.vcr
 def test_dalle(
     image_gen_component: ImageGeneratorComponent,
@@ -52,7 +56,6 @@ def test_dalle(
     reason="The image is too big to be put in a cassette for a CI pipeline. "
     "We're looking into a solution."
 )
-@pytest.mark.requires_huggingface_api_key
 @pytest.mark.parametrize(
     "image_model",
     ["CompVis/stable-diffusion-v1-4", "stabilityai/stable-diffusion-2-1"],
diff --git a/autogpt/tests/unit/test_web_search.py b/forge/forge/components/web/test_search.py
similarity index 88%
rename from autogpt/tests/unit/test_web_search.py
rename to forge/forge/components/web/test_search.py
index 2f0c9285d72d..2ce5ff8a8585 100644
--- a/autogpt/tests/unit/test_web_search.py
+++ b/forge/forge/components/web/test_search.py
@@ -1,19 +1,23 @@
 import json
 
 import pytest
-from forge.components.web.search import WebSearchComponent
-from forge.utils.exceptions import ConfigurationError
 from googleapiclient.errors import HttpError
+from httplib2 import Response
 from pydantic import SecretStr
 
-from autogpt.agents.agent import Agent
+from forge.utils.exceptions import ConfigurationError
+
+from . import WebSearchComponent
 
 
 @pytest.fixture
-def web_search_component(agent: Agent):
-    agent.web_search.config.google_api_key = SecretStr("test")
-    agent.web_search.config.google_custom_search_engine_id = SecretStr("test")
-    return agent.web_search
+def web_search_component():
+    component = WebSearchComponent()
+    if component.config.google_api_key is None:
+        component.config.google_api_key = SecretStr("test")
+    if component.config.google_custom_search_engine_id is None:
+        component.config.google_custom_search_engine_id = SecretStr("test")
+    return component
 
 
 @pytest.mark.parametrize(
@@ -134,16 +138,11 @@ def test_google_official_search_errors(
     error_msg,
     web_search_component: WebSearchComponent,
 ):
-    class resp:
-        def __init__(self, _status, _reason):
-            self.status = _status
-            self.reason = _reason
-
     response_content = {
         "error": {"code": http_code, "message": error_msg, "reason": "backendError"}
     }
     error = HttpError(
-        resp=resp(http_code, error_msg),
+        resp=Response({"status": http_code, "reason": error_msg}),
         content=str.encode(json.dumps(response_content)),
         uri="https://www.googleapis.com/customsearch/v1?q=invalid+query&cx",
     )
diff --git a/autogpt/tests/integration/test_web_selenium.py b/forge/forge/components/web/test_selenium.py
similarity index 59%
rename from autogpt/tests/integration/test_web_selenium.py
rename to forge/forge/components/web/test_selenium.py
index e8198af3ab89..cbd07cd3cebb 100644
--- a/autogpt/tests/integration/test_web_selenium.py
+++ b/forge/forge/components/web/test_selenium.py
@@ -1,19 +1,20 @@
+from pathlib import Path
+
 import pytest
-from forge.components.web.selenium import BrowsingError, WebSeleniumComponent
 
-from autogpt.agents.agent import Agent
+from forge.llm.providers.multi import MultiProvider
+
+from . import BrowsingError, WebSeleniumComponent
 
 
 @pytest.fixture
-def web_selenium_component(agent: Agent):
-    return agent.web_selenium
+def web_selenium_component(app_data_dir: Path):
+    return WebSeleniumComponent(MultiProvider(), app_data_dir)
 
 
-@pytest.mark.vcr
-@pytest.mark.requires_openai_api_key
 @pytest.mark.asyncio
 async def test_browse_website_nonexistent_url(
-    web_selenium_component: WebSeleniumComponent, cached_openai_client: None
+    web_selenium_component: WebSeleniumComponent,
 ):
     url = "https://auto-gpt-thinks-this-website-does-not-exist.com"
     question = "How to execute a barrel roll"
diff --git a/forge/forge/conftest.py b/forge/forge/conftest.py
deleted file mode 100644
index 4a223b9a077b..000000000000
--- a/forge/forge/conftest.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from pathlib import Path
-
-import pytest
-
-
-@pytest.fixture()
-def test_workspace(tmp_path: Path) -> Path:
-    return tmp_path
diff --git a/autogpt/tests/unit/test_gcs_file_storage.py b/forge/forge/file_storage/test_gcs_file_storage.py
similarity index 98%
rename from autogpt/tests/unit/test_gcs_file_storage.py
rename to forge/forge/file_storage/test_gcs_file_storage.py
index d10c48988d79..7e27367ada5a 100644
--- a/autogpt/tests/unit/test_gcs_file_storage.py
+++ b/forge/forge/file_storage/test_gcs_file_storage.py
@@ -4,11 +4,12 @@
 
 import pytest
 import pytest_asyncio
-from forge.file_storage.gcs import GCSFileStorage, GCSFileStorageConfiguration
 from google.auth.exceptions import GoogleAuthError
 from google.cloud import storage
 from google.cloud.exceptions import NotFound
 
+from .gcs import GCSFileStorage, GCSFileStorageConfiguration
+
 try:
     storage.Client()
 except GoogleAuthError:
diff --git a/autogpt/tests/unit/test_local_file_storage.py b/forge/forge/file_storage/test_local_file_storage.py
similarity index 98%
rename from autogpt/tests/unit/test_local_file_storage.py
rename to forge/forge/file_storage/test_local_file_storage.py
index 930e562e4c59..c8de2b27f545 100644
--- a/autogpt/tests/unit/test_local_file_storage.py
+++ b/forge/forge/file_storage/test_local_file_storage.py
@@ -1,7 +1,8 @@
 from pathlib import Path
 
 import pytest
-from forge.file_storage.local import FileStorageConfiguration, LocalFileStorage
+
+from .local import FileStorageConfiguration, LocalFileStorage
 
 _ACCESSIBLE_PATHS = [
     Path("."),
diff --git a/autogpt/tests/unit/test_s3_file_storage.py b/forge/forge/file_storage/test_s3_file_storage.py
similarity index 98%
rename from autogpt/tests/unit/test_s3_file_storage.py
rename to forge/forge/file_storage/test_s3_file_storage.py
index 3a40ad8f7f4a..922904399c48 100644
--- a/autogpt/tests/unit/test_s3_file_storage.py
+++ b/forge/forge/file_storage/test_s3_file_storage.py
@@ -5,7 +5,8 @@
 import pytest
 import pytest_asyncio
 from botocore.exceptions import ClientError
-from forge.file_storage.s3 import S3FileStorage, S3FileStorageConfiguration
+
+from .s3 import S3FileStorage, S3FileStorageConfiguration
 
 if not (os.getenv("S3_ENDPOINT_URL") and os.getenv("AWS_ACCESS_KEY_ID")):
     pytest.skip("S3 environment variables are not set", allow_module_level=True)
diff --git a/autogpt/tests/unit/test_json.py b/forge/forge/json/test_parsing.py
similarity index 98%
rename from autogpt/tests/unit/test_json.py
rename to forge/forge/json/test_parsing.py
index 906723883837..882d6013257d 100644
--- a/autogpt/tests/unit/test_json.py
+++ b/forge/forge/json/test_parsing.py
@@ -1,7 +1,8 @@
 import json
 
 import pytest
-from forge.json.parsing import json_loads
+
+from .parsing import json_loads
 
 _JSON_FIXABLE: list[tuple[str, str]] = [
     # Missing comma
diff --git a/autogpt/tests/unit/test_logs.py b/forge/forge/logging/test_utils.py
similarity index 96%
rename from autogpt/tests/unit/test_logs.py
rename to forge/forge/logging/test_utils.py
index fd3c342db038..b3682d42cf0d 100644
--- a/autogpt/tests/unit/test_logs.py
+++ b/forge/forge/logging/test_utils.py
@@ -1,5 +1,6 @@
 import pytest
-from forge.logging.utils import remove_color_codes
+
+from .utils import remove_color_codes
 
 
 @pytest.mark.parametrize(
diff --git a/forge/forge/utils/__init__.py b/forge/forge/utils/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/autogpt/tests/unit/test_text_file_parsers.py b/forge/forge/utils/test_file_operations.py
similarity index 98%
rename from autogpt/tests/unit/test_text_file_parsers.py
rename to forge/forge/utils/test_file_operations.py
index 77a4af696fdf..e333b3e40e4b 100644
--- a/autogpt/tests/unit/test_text_file_parsers.py
+++ b/forge/forge/utils/test_file_operations.py
@@ -9,7 +9,8 @@
 import pytest
 import yaml
 from bs4 import BeautifulSoup
-from forge.utils.file_operations import decode_textual_file, is_file_binary_fn
+
+from .file_operations import decode_textual_file, is_file_binary_fn
 
 logger = logging.getLogger(__name__)
diff --git a/autogpt/tests/unit/test_url_validation.py b/forge/forge/utils/test_url_validator.py
similarity index 98%
rename from autogpt/tests/unit/test_url_validation.py
rename to forge/forge/utils/test_url_validator.py
index 38116a622926..dc2cce300805 100644
--- a/autogpt/tests/unit/test_url_validation.py
+++ b/forge/forge/utils/test_url_validator.py
@@ -1,7 +1,8 @@
 import pytest
-from forge.utils.url_validator import validate_url
 from pytest import raises
 
+from .url_validator import validate_url
+
 
 @validate_url
 def dummy_method(url):
diff --git a/forge/poetry.lock b/forge/poetry.lock
index 127d6efa8343..6a863f7a3f4b 100644
--- a/forge/poetry.lock
+++ b/forge/poetry.lock
@@ -4986,6 +4986,42 @@ pytest = ">=4.6"
 [package.extras]
 testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"]
 
+[[package]]
+name = "pytest-mock"
+version = "3.14.0"
+description = "Thin-wrapper around the mock package for easier use with pytest"
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"},
+    {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"},
+]
+
+[package.dependencies]
+pytest = ">=6.2.5"
+
+[package.extras]
+dev = ["pre-commit", "pytest-asyncio", "tox"]
+
+[[package]]
+name = "pytest-recording"
+version = "0.13.1"
+description = "A pytest plugin that allows you recording of network interactions via VCR.py"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "pytest_recording-0.13.1-py3-none-any.whl", hash = "sha256:e5c75feb2593eb4ed9362182c6640bfe19004204bf9a6082d62c91b5fdb50a3e"},
+    {file = "pytest_recording-0.13.1.tar.gz", hash = "sha256:1265d679f39263f115968ec01c2a3bfed250170fd1b0d9e288970b2e4a13737a"},
+]
+
+[package.dependencies]
+pytest = ">=3.5.0"
+vcrpy = ">=2.0.1"
+
+[package.extras]
+dev = ["pytest-recording[tests]"]
+tests = ["pytest-httpbin", "pytest-mock", "requests", "werkzeug (==3.0.1)"]
+
 [[package]]
 name = "python-dateutil"
 version = "2.8.2"
@@ -6504,6 +6540,26 @@ files = [
 docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"]
 test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"]
 
+[[package]]
+name = "vcrpy"
+version = "5.1.0"
+description = "Automatically mock your HTTP interactions to simplify and speed up testing"
+optional = false
+python-versions = ">=3.8"
+files = []
+develop = false
+
+[package.dependencies]
+PyYAML = "*"
+wrapt = "*"
+yarl = "*"
+
+[package.source]
+type = "git"
+url = "https://github.com/Significant-Gravitas/vcrpy.git"
+reference = "master"
+resolved_reference = "bfd15f9d06a516138b673cb481547f3352d9cc43"
+
 [[package]]
 name = "virtualenv"
 version = "20.25.0"
@@ -7029,4 +7085,4 @@ benchmark = ["agbenchmark"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "5b8cca9caced2687d88fc61dc263054f15c49f2daa1560fa4d94fb5b38d461aa"
+content-hash = "7523abd672967cbe924f045a00bf519ee08c8537fdf2f2191d2928201497d7b7"
diff --git a/forge/pyproject.toml b/forge/pyproject.toml
index 991012014e58..b03fe7127189 100644
--- a/forge/pyproject.toml
+++ b/forge/pyproject.toml
@@ -76,7 +76,10 @@ types-requests = "^2.31.0.2"
 pytest = "^7.4.0"
 pytest-asyncio = "^0.23.3"
 pytest-cov = "^5.0.0"
+pytest-mock = "*"
+pytest-recording = "*"
 mock = "^5.1.0"
+vcrpy = { git = "https://github.com/Significant-Gravitas/vcrpy.git", rev = "master" }
 
 
 [build-system]
@@ -101,3 +104,4 @@ pythonVersion = "3.10"
 [tool.pytest.ini_options]
 pythonpath = ["forge"]
 testpaths = ["forge", "tests"]
+markers = ["slow"]
diff --git a/autogpt/tests/vcr/__init__.py b/forge/tests/vcr/__init__.py
similarity index 100%
rename from autogpt/tests/vcr/__init__.py
rename to forge/tests/vcr/__init__.py
diff --git a/autogpt/tests/vcr/vcr_filter.py b/forge/tests/vcr/vcr_filter.py
similarity index 100%
rename from autogpt/tests/vcr/vcr_filter.py
rename to forge/tests/vcr/vcr_filter.py
diff --git a/autogpt/tests/vcr_cassettes b/forge/tests/vcr_cassettes
similarity index 100%
rename from autogpt/tests/vcr_cassettes
rename to forge/tests/vcr_cassettes