From f109c3e019aea89aca634797f8ed94b49d3e0239 Mon Sep 17 00:00:00 2001 From: Reinier van der Leer Date: Tue, 24 Sep 2024 18:26:15 +0200 Subject: [PATCH 01/12] ci: Set up customized CodeQL scanning (#8106) This replaces the dynamically configured CodeQL scanning that we had set up through the GitHub settings. --- .github/workflows/codeql.yml | 97 ++++++++++++++++++++++++++++++++++++ 1 file changed, 97 insertions(+) create mode 100644 .github/workflows/codeql.yml diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 000000000000..31848f3bbbe6 --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,97 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: [ "master", "release-*" ] + pull_request: + branches: [ "master", "release-*" ] + schedule: + - cron: '15 4 * * 0' + +jobs: + analyze: + name: Analyze (${{ matrix.language }}) + # Runner size impacts CodeQL analysis time. To learn more, please see: + # - https://gh.io/recommended-hardware-resources-for-running-codeql + # - https://gh.io/supported-runners-and-hardware-resources + # - https://gh.io/using-larger-runners (GitHub.com only) + # Consider using larger runners or machines with greater resources for possible analysis time improvements. + runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} + permissions: + # required for all workflows + security-events: write + + # required to fetch internal or private CodeQL packs + packages: read + + # only required for workflows in private repositories + actions: read + contents: read + + strategy: + fail-fast: false + matrix: + include: + - language: typescript + build-mode: none + - language: python + build-mode: none + # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' + # Use `c-cpp` to analyze code written in C, C++ or both + # Use 'java-kotlin' to analyze code written in Java, Kotlin or both + # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both + # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis, + # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning. + # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how + # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + build-mode: ${{ matrix.build-mode }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. 
+ # Prefix the list here with "+" to use these queries and those in the config file. + config: | + paths-ignore: + - classic/frontend/build/** + + # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + # If the analyze step fails for one of the languages you are analyzing with + # "We were unable to automatically build your code", modify the matrix above + # to set the build mode to "manual" for that language. Then modify this step + # to build your code. + # ℹ️ Command-line programs to run using the OS shell. + # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + - if: matrix.build-mode == 'manual' + shell: bash + run: | + echo 'If you are using a "manual" build mode for one or more of the' \ + 'languages you are analyzing, replace this with the commands to build' \ + 'your code, for example:' + echo ' make bootstrap' + echo ' make release' + exit 1 + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{matrix.language}}" From b5dd75fad29d29d403c166eed56de292cd1da445 Mon Sep 17 00:00:00 2001 From: Toran Bruce Richards Date: Tue, 24 Sep 2024 20:03:41 +0100 Subject: [PATCH 02/12] Update README with new Tutorial.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 9393a5ca0639..db3c4ba02f03 100644 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ ## How to Get Started -https://github.com/user-attachments/assets/8508f4dc-b362-4cab-900f-644964a96cdf +https://github.com/user-attachments/assets/d04273a5-b36a-4a37-818e-f631ce72d603 ### 🧱 AutoGPT Builder From 769058a8c9ef5499b072dafe27acb21b44086276 Mon Sep 17 00:00:00 2001 From: Kaitlyn Barnard Date: Tue, 24 Sep 2024 12:24:21 -0700 Subject: [PATCH 03/12] Update README.md (#8150) Edits based on AutoGPT Platform launch Co-authored-by: Toran Bruce Richards --- README.md | 48 +++++++++++++++++++++++++++++++++++------------- 1 file changed, 35 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index db3c4ba02f03..b28bcf918217 100644 --- a/README.md +++ b/README.md @@ -1,41 +1,63 @@ -# AutoGPT: Build & Use AI Agents +# AutoGPT: Build, Deploy, and Run AI Agents [![Discord Follow](https://dcbadge.vercel.app/api/server/autogpt?style=flat)](https://discord.gg/autogpt)   [![Twitter Follow](https://img.shields.io/twitter/follow/Auto_GPT?style=social)](https://twitter.com/Auto_GPT)   [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) -**AutoGPT** is a powerful tool that lets you create and run intelligent agents. These agents can perform various tasks automatically, making your life easier. +**AutoGPT** is a powerful platform that allows you to create, deploy, and manage continuous AI agents that automate complex workflows. + +## Hosting Options + - Download to self-host + - [Join the Waitlist](https://bit.ly/3ZDijAI) for the cloud-hosted beta ## How to Get Started https://github.com/user-attachments/assets/d04273a5-b36a-4a37-818e-f631ce72d603 -### 🧱 AutoGPT Builder +### 🧱 AutoGPT Frontend + +The AutoGPT frontend is where users interact with our powerful AI automation platform. It offers multiple ways to engage with and leverage our AI agents. 
This is the interface where you'll bring your AI automation ideas to life:
 
-The AutoGPT Builder is the frontend. It allows you to design agents using an easy flowchart style. You build your agent by connecting blocks, where each block performs a single action. It's simple and intuitive!
+ **Agent Builder:** For those who want to customize, our intuitive, low-code interface allows you to design and configure your own AI agents.
+
+ **Workflow Management:** Build, modify, and optimize your automation workflows with ease. You build your agent by connecting blocks, where each block performs a single action.
+
+ **Deployment Controls:** Manage the lifecycle of your agents, from testing to production.
+
+ **Ready-to-Use Agents:** Don't want to build? Simply select from our library of pre-configured agents and put them to work immediately.
+
+ **Agent Interaction:** Whether you've built your own or are using pre-configured agents, easily run and interact with them through our user-friendly interface.
+
+ **Monitoring and Analytics:** Keep track of your agents' performance and gain insights to continually improve your automation processes.
 
 [Read this guide](https://docs.agpt.co/server/new_blocks/) to learn how to build your own custom blocks.
 
 ### 💽 AutoGPT Server
 
-The AutoGPT Server is the backend. This is where your agents run. Once deployed, agents can be triggered by external sources and can operate continuously.
+The AutoGPT Server is the powerhouse of our platform. This is where your agents run. Once deployed, agents can be triggered by external sources and can operate continuously. It contains all the essential components that make AutoGPT run smoothly.
+
+ **Source Code:** The core logic that drives our agents and automation processes.
+
+ **Infrastructure:** Robust systems that ensure reliable and scalable performance.
+
+ **Marketplace:** A comprehensive marketplace where you can find and deploy a wide range of pre-built agents.
 
 ### 🐙 Example Agents
 
 Here are two examples of what you can do with AutoGPT:
 
-1. **Reddit Marketing Agent**
-   - This agent reads comments on Reddit.
-   - It looks for people asking about your product.
-   - It then automatically responds to them.
+1. **Generate Viral Videos from Trending Topics**
+   - This agent reads topics on Reddit.
+   - It identifies trending topics.
+   - It then automatically creates a short-form video based on the content.
 
-2. **YouTube Content Repurposing Agent**
+2. **Identify Top Quotes from Videos for Social Media**
    - This agent subscribes to your YouTube channel.
    - When you post a new video, it transcribes it.
-   - It uses AI to write a search engine optimized blog post.
-   - Then, it publishes this blog post to your Medium account.
+   - It uses AI to identify the most impactful quotes to generate a summary.
+   - Then, it writes a post to automatically publish to your social media.
 
-These examples show just a glimpse of what you can achieve with AutoGPT!
+These examples show just a glimpse of what you can achieve with AutoGPT! You can create customized workflows to build agents for any use case. 
--- Our mission is to provide the tools, so that you can focus on what matters: From e04beffe62757995ce045d3112567b470e1b31c3 Mon Sep 17 00:00:00 2001 From: Toran Bruce Richards Date: Tue, 24 Sep 2024 20:55:24 +0100 Subject: [PATCH 04/12] Update README.md (#8153) * Update README.md * Update README.md * Update README.md --- README.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index b28bcf918217..c6c603988e7e 100644 --- a/README.md +++ b/README.md @@ -10,10 +10,15 @@ - Download to self-host - [Join the Waitlist](https://bit.ly/3ZDijAI) for the cloud-hosted beta -## How to Get Started +## How to Setup for Self-Hosting +> [!NOTE] +> Setting up and hosting the AutoGPT Platform yourself is a technical process. +> If you'd rather something that just works, we recommend [joining the waitlist](https://bit.ly/3ZDijAI) for the cloud-hosted beta. https://github.com/user-attachments/assets/d04273a5-b36a-4a37-818e-f631ce72d603 +This tutorial assumes you have Docker, VSCode, git and npm installed. + ### 🧱 AutoGPT Frontend The AutoGPT frontend is where users interact with our powerful AI automation platform. It offers multiple ways to engage with and leverage our AI agents. This is the interface where you'll bring your AI automation ideas to life: From 00b8d219f2d85c1d12cd3cc1cfe0f5f431d7c19d Mon Sep 17 00:00:00 2001 From: Swifty Date: Tue, 24 Sep 2024 22:01:06 +0200 Subject: [PATCH 05/12] fix(frontend): Remove Sentry Pop-up and add run options (#8138) * modify sentry setup * Update sentry.client.config.ts * remove env vars from dev so it will work on windows still --- autogpt_platform/frontend/package.json | 3 ++- autogpt_platform/frontend/sentry.client.config.ts | 10 +--------- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/autogpt_platform/frontend/package.json b/autogpt_platform/frontend/package.json index 5f9b5cad686e..d37c0977b9b1 100644 --- a/autogpt_platform/frontend/package.json +++ b/autogpt_platform/frontend/package.json @@ -3,7 +3,8 @@ "version": "0.1.0", "private": true, "scripts": { - "dev": "export NODE_ENV=development && next dev", + "dev": "next dev", + "dev:nosentry": "export NODE_ENV=development && export DISABLE_SENTRY=true && next dev", "dev:test": "export NODE_ENV=test && next dev", "build": "next build", "start": "next start", diff --git a/autogpt_platform/frontend/sentry.client.config.ts b/autogpt_platform/frontend/sentry.client.config.ts index f37d5cda2394..bbfe73ea514e 100644 --- a/autogpt_platform/frontend/sentry.client.config.ts +++ b/autogpt_platform/frontend/sentry.client.config.ts @@ -7,7 +7,7 @@ import * as Sentry from "@sentry/nextjs"; Sentry.init({ dsn: "https://fe4e4aa4a283391808a5da396da20159@o4505260022104064.ingest.us.sentry.io/4507946746380288", - enabled: process.env.NODE_ENV !== "development", + enabled: process.env.DISABLE_SENTRY !== "true", // Add optional integrations for additional features integrations: [ @@ -31,14 +31,6 @@ Sentry.init({ /^https:\/\/dev\-builder\.agpt\.co\/api/, ], - beforeSend(event, hint) { - // Check if it is an exception, and if so, show the report dialog - if (event.exception && event.event_id) { - Sentry.showReportDialog({ eventId: event.event_id }); - } - return event; - }, - // Define how likely Replay events are sampled. // This sets the sample rate to be 10%. 
You may want this to be 100% while
  // in development and sample at a lower rate in production

From 591a2bc43191a02f72a9ec01f1c962bdf123c3d7 Mon Sep 17 00:00:00 2001
From: Toran Bruce Richards
Date: Tue, 24 Sep 2024 23:11:57 +0100
Subject: [PATCH 06/12] Add files via upload

---
 .../Contributor License Agreement (CLA).md    |  21 +++
 autogpt_platform/LICENCE.txt                  | 164 ++++++++++++++++++
 2 files changed, 185 insertions(+)
 create mode 100644 autogpt_platform/Contributor License Agreement (CLA).md
 create mode 100644 autogpt_platform/LICENCE.txt

diff --git a/autogpt_platform/Contributor License Agreement (CLA).md b/autogpt_platform/Contributor License Agreement (CLA).md
new file mode 100644
index 000000000000..b15bd52e5698
--- /dev/null
+++ b/autogpt_platform/Contributor License Agreement (CLA).md
@@ -0,0 +1,21 @@
+**Determinist Ltd**
+
+**Contributor License Agreement (“Agreement”)**
+
+Thank you for your interest in the AutoGPT open source project at [https://github.com/Significant-Gravitas/AutoGPT](https://github.com/Significant-Gravitas/AutoGPT) stewarded by Determinist Ltd (“**Determinist**”), with offices at 3rd Floor 1 Ashley Road, Altrincham, Cheshire, WA14 2DT, United Kingdom. The form of license below is a document that clarifies the terms under which You, the person listed below, may contribute software code described below (the “**Contribution**”) to the project. We appreciate your participation in our project, and your help in improving our products, so we want you to understand what will be done with the Contributions. This license is for your protection as well as the protection of Determinist and its licensees; it does not change your rights to use your own Contributions for any other purpose.
+
+By submitting a Pull Request which modifies the content of the “autogpt\_platform” folder at [https://github.com/Significant-Gravitas/AutoGPT/tree/master/autogpt\_platform](https://github.com/Significant-Gravitas/AutoGPT/tree/master/autogpt_platform), You hereby agree:
+
+1\. **You grant us the ability to use the Contributions in any way**. You hereby grant to Determinist a non-exclusive, irrevocable, worldwide, royalty-free, sublicenseable, transferable license under all of Your relevant intellectual property rights (including copyright, patent, and any other rights), to use, copy, prepare derivative works of, distribute and publicly perform and display the Contributions on any licensing terms, including without limitation: (a) open source licenses like the GNU General Public License (GPL), the GNU Lesser General Public License (LGPL), the Common Public License, or the Berkeley Science Division license (BSD); and (b) binary, proprietary, or commercial licenses.
+
+2\. **Grant of Patent License**. You hereby grant to Determinist a worldwide, non-exclusive, royalty-free, irrevocable, license, under any rights you may have, now or in the future, in any patents or patent applications, to make, have made, use, offer to sell, sell, and import products containing the Contribution or portions of the Contribution. This license extends to patent claims that are infringed by the Contribution alone or by combination of the Contribution with other inventions.
+
+3\. **Limitations on Licenses**. The licenses granted in this Agreement will continue for the duration of the applicable patent or intellectual property right under which such license is granted. 
The licenses granted in this Agreement will include the right to grant and authorize sublicenses, so long as the sublicenses are within the scope of the licenses granted in this Agreement. Except for the licenses granted herein, You reserve all right, title, and interest in and to the Contribution.
+
+4\. **You are able to grant us these rights**. You represent that You are legally entitled to grant the above license. If Your employer has rights to intellectual property that You create, You represent that You are authorized to make the Contributions on behalf of that employer, or that Your employer has waived such rights for the Contributions.
+
+5\. **The Contributions are your original work**. You represent that the Contributions are Your original works of authorship, and to Your knowledge, no other person claims, or has the right to claim, any right in any invention or patent related to the Contributions. You also represent that You are not legally obligated, whether by entering into an agreement or otherwise, in any way that conflicts with the terms of this license. For example, if you have signed an agreement requiring you to assign the intellectual property rights in the Contributions to an employer or customer, that would conflict with the terms of this license.
+
+6\. **We determine the code that is in our products**. You understand that the decision to include the Contribution in any product or source repository is entirely that of Determinist, and this agreement does not guarantee that the Contributions will be included in any product.
+
+7\. **No Implied Warranties.** Determinist acknowledges that, except as explicitly described in this Agreement, the Contribution is provided on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY WARRANTIES OR CONDITIONS OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY, OR FITNESS FOR A PARTICULAR PURPOSE.
diff --git a/autogpt_platform/LICENCE.txt b/autogpt_platform/LICENCE.txt
new file mode 100644
index 000000000000..3f09b052c0f4
--- /dev/null
+++ b/autogpt_platform/LICENCE.txt
@@ -0,0 +1,164 @@
+# PolyForm Shield License 1.0.0
+
+<https://polyformproject.org/licenses/shield/1.0.0>
+
+## Acceptance
+
+In order to get any license under these terms, you must agree
+to them as both strict obligations and conditions to all
+your licenses.
+
+## Copyright License
+
+The licensor grants you a copyright license for the
+software to do everything you might do with the software
+that would otherwise infringe the licensor's copyright
+in it for any permitted purpose. However, you may
+only distribute the software according to [Distribution
+License](#distribution-license) and make changes or new works
+based on the software according to [Changes and New Works
+License](#changes-and-new-works-license).
+
+## Distribution License
+
+The licensor grants you an additional copyright license
+to distribute copies of the software. Your license
+to distribute covers distributing the software with
+changes and new works permitted by [Changes and New Works
+License](#changes-and-new-works-license).
+
+## Notices
+
+You must ensure that anyone who gets a copy of any part of
+the software from you also gets a copy of these terms or the
+URL for them above, as well as copies of any plain-text lines
+beginning with `Required Notice:` that the licensor provided
+with the software. For example:
+
+> Required Notice: Copyright Yoyodyne, Inc. 
(http://example.com) + +## Changes and New Works License + +The licensor grants you an additional copyright license to +make changes and new works based on the software for any +permitted purpose. + +## Patent License + +The licensor grants you a patent license for the software that +covers patent claims the licensor can license, or becomes able +to license, that you would infringe by using the software. + +## Noncompete + +Any purpose is a permitted purpose, except for providing any +product that competes with the software or any product the +licensor or any of its affiliates provides using the software. + +## Competition + +Goods and services compete even when they provide functionality +through different kinds of interfaces or for different technical +platforms. Applications can compete with services, libraries +with plugins, frameworks with development tools, and so on, +even if they're written in different programming languages +or for different computer architectures. Goods and services +compete even when provided free of charge. If you market a +product as a practical substitute for the software or another +product, it definitely competes. + +## New Products + +If you are using the software to provide a product that does +not compete, but the licensor or any of its affiliates brings +your product into competition by providing a new version of +the software or another product using the software, you may +continue using versions of the software available under these +terms beforehand to provide your competing product, but not +any later versions. + +## Discontinued Products + +You may begin using the software to compete with a product +or service that the licensor or any of its affiliates has +stopped providing, unless the licensor includes a plain-text +line beginning with `Licensor Line of Business:` with the +software that mentions that line of business. For example: + +> Licensor Line of Business: YoyodyneCMS Content Management +System (http://example.com/cms) + +## Sales of Business + +If the licensor or any of its affiliates sells a line of +business developing the software or using the software +to provide a product, the buyer can also enforce +[Noncompete](#noncompete) for that product. + +## Fair Use + +You may have "fair use" rights for the software under the +law. These terms do not limit them. + +## No Other Rights + +These terms do not allow you to sublicense or transfer any of +your licenses to anyone else, or prevent the licensor from +granting licenses to anyone else. These terms do not imply +any other licenses. + +## Patent Defense + +If you make any written claim that the software infringes or +contributes to infringement of any patent, your patent license +for the software granted under these terms ends immediately. If +your company makes such a claim, your patent license ends +immediately for work on behalf of your company. + +## Violations + +The first time you are notified in writing that you have +violated any of these terms, or done anything with the software +not covered by your licenses, your licenses can nonetheless +continue if you come into full compliance with these terms, +and take practical steps to correct past violations, within +32 days of receiving notice. Otherwise, all your licenses +end immediately. 
+ +## No Liability + +***As far as the law allows, the software comes as is, without +any warranty or condition, and the licensor will not be liable +to you for any damages arising out of these terms or the use +or nature of the software, under any kind of legal claim.*** + +## Definitions + +The **licensor** is the individual or entity offering these +terms, and the **software** is the software the licensor makes +available under these terms. + +A **product** can be a good or service, or a combination +of them. + +**You** refers to the individual or entity agreeing to these +terms. + +**Your company** is any legal entity, sole proprietorship, +or other kind of organization that you work for, plus all +its affiliates. + +**Affiliates** means the other organizations than an +organization has control over, is under the control of, or is +under common control with. + +**Control** means ownership of substantially all the assets of +an entity, or the power to direct its management and policies +by vote, contract, or otherwise. Control can be direct or +indirect. + +**Your licenses** are all the licenses granted to you for the +software under these terms. + +**Use** means anything you do with the software requiring one +of your licenses. From 2b0ec123cd9bb10e936a6758660e59c3faae1730 Mon Sep 17 00:00:00 2001 From: Toran Bruce Richards Date: Tue, 24 Sep 2024 23:16:24 +0100 Subject: [PATCH 07/12] Update CONTRIBUTING.md --- CONTRIBUTING.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9e083c570922..fb29d72d3838 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -10,6 +10,9 @@ Also check out our [🚀 Roadmap][roadmap] for information about our priorities [roadmap]: https://github.com/Significant-Gravitas/AutoGPT/discussions/6971 [kanban board]: https://github.com/orgs/Significant-Gravitas/projects/1 +## Contributing to the AutoGPT Platform Folder +All contributions to [the autogpt_platform folder](https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform) will be under our [Contribution License Agreement](https://github.com/Significant-Gravitas/AutoGPT/blob/master/autogpt_platform/Contributor%20License%20Agreement%20(CLA).md). By making a pull request contributing to this folder, you agree to the terms of our CLA for your contribution. + ## In short 1. Avoid duplicate work, issues, PRs etc. 2. We encourage you to collaborate with fellow community members on some of our bigger From 6da8007ce02e7ef27096fb97101af7b9fd36a24d Mon Sep 17 00:00:00 2001 From: Zamil Majdy Date: Tue, 24 Sep 2024 17:43:54 -0500 Subject: [PATCH 08/12] fix(platform): Refresh doc setup instruction (#8142) --- autogpt_platform/README.md | 49 +++++++++---- docs/content/server/setup.md | 138 +++++++++++++++-------------------- 2 files changed, 93 insertions(+), 94 deletions(-) diff --git a/autogpt_platform/README.md b/autogpt_platform/README.md index 4415ce1a3c6d..db642809323f 100644 --- a/autogpt_platform/README.md +++ b/autogpt_platform/README.md @@ -8,40 +8,61 @@ Welcome to the AutoGPT Platform - a powerful system for creating and running AI - Docker - Docker Compose V2 (comes with Docker Desktop, or can be installed separately) +- Node.js & NPM (for running the frontend application) ### Running the System To run the AutoGPT Platform, follow these steps: -1. Clone this repository to your local machine. -2. Navigate to autogpt_platform/supabase -3. Run the following command: +1. 
Clone this repository to your local machine and navigate to the `autogpt_platform` directory within the repository: + ``` + git clone + cd AutoGPT/autogpt_platform + ``` + +2. Run the following command: ``` - git submodule update --init --recursive + git submodule update --init --recursive ``` -4. Navigate back to autogpt_platform (cd ..) -5. Run the following command: + This command will initialize and update the submodules in the repository. The `supabase` folder will be cloned to the root directory. + +3. Run the following command: ``` - cp supabase/docker/.env.example .env + cp supabase/docker/.env.example .env ``` -6. Run the following command: + This command will copy the `.env.example` file to `.env` in the `supabase/docker` directory. You can modify the `.env` file to add your own environment variables. +4. Run the following command: ``` docker compose up -d + ``` + This command will start all the necessary backend services defined in the `docker-compose.yml` file in detached mode. + +5. Navigate to `frontend` within the `autogpt_platform` directory: + ``` + cd frontend + ``` + You will need to run your frontend application separately on your local machine. +6. Run the following command: + ``` + cp .env.example .env ``` + This command will copy the `.env.example` file to `.env` in the `frontend` directory. You can modify the `.env` within this folder to add your own environment variables for the frontend application. - This command will start all the necessary backend services defined in the `docker-compose.combined.yml` file in detached mode. -7. Navigate to autogpt_platform/frontend. -8. Run the following command: +7. Run the following command: ``` - cp .env.example .env.local + npm install + npm run dev ``` -9. Run the following command: + This command will install the necessary dependencies and start the frontend application in development mode. + If you are using Yarn, you can run the following commands instead: ``` - yarn dev + yarn install && yarn dev ``` +8. Open your browser and navigate to `http://localhost:3000` to access the AutoGPT Platform frontend. + ### Docker Compose Commands Here are some useful Docker Compose commands for managing your AutoGPT Platform: diff --git a/docs/content/server/setup.md b/docs/content/server/setup.md index cf850cfae2d7..3a33fc781fc4 100644 --- a/docs/content/server/setup.md +++ b/docs/content/server/setup.md @@ -19,121 +19,99 @@ We also offer this in video format. You can check it out [here](https://github.c To setup the server, you need to have the following installed: - [Node.js](https://nodejs.org/en/) -- [Python 3.10](https://www.python.org/downloads/) +- [Docker](https://docs.docker.com/get-docker/) -### Checking if you have Node.js and Python installed +### Checking if you have Node.js & NPM installed -You can check if you have Node.js installed by running the following command: +We use Node.js to run our frontend application. -```bash -node -v -``` - -You can check if you have Python installed by running the following command: - -```bash -python --version -``` - -Once you have node and python installed, you can proceed to the next step. +If you need assistance installing Node.js: +https://nodejs.org/en/download/ -### Installing the package managers +NPM is included with Node.js, but if you need assistance installing NPM: +https://docs.npmjs.com/downloading-and-installing-node-js-and-npm -In order to install the dependencies, you need to have the appropriate package managers installed. 
- -- Installing Yarn - -Yarn is a package manager for Node.js. You can install it by running the following command: +You can check if you have Node.js & NPM installed by running the following command: ```bash -npm install -g yarn +node -v +npm -v ``` -- Installing Poetry +Once you have Node.js installed, you can proceed to the next step. -Poetry is a package manager for Python. You can install it by running the following command: - -```bash -pip install poetry -``` -- Installing Docker and Docker Compose +### Checking if you have Docker & Docker Compose installed Docker containerizes applications, while Docker Compose orchestrates multi-container Docker applications. -You can follow the steps here: - If you need assistance installing docker: https://docs.docker.com/desktop/ -If you need assistance installing docker compose: -https://docs.docker.com/compose/install/ -### Installing the dependencies +Docker-compose is included in Docker Desktop, but if you need assistance installing docker compose: +https://docs.docker.com/compose/install/ -Once you have installed Yarn and Poetry, you can run the following command to install the dependencies: +You can check if you have Docker installed by running the following command: ```bash -cd autogpt_platform/backend -cp .env.example .env -poetry install +docker -v +docker-compose -v ``` -**In another terminal**, run the following command to install the dependencies for the frontend: +Once you have Docker and Docker Compose installed, you can proceed to the next step. -```bash -cd autogpt_platform/frontend -yarn install -``` +## Running the backend services -Once you have installed the dependencies, you can proceed to the next step. +To run the backend services, follow these steps: -### Setting up the database +* Within the repository, clone the submodules and navigate to the `autogpt_platform` directory: + ```bash + git submodule update --init --recursive + cd autogpt_platform + ``` + This command will initialize and update the submodules in the repository. The `supabase` folder will be cloned to the root directory. -In order to setup the database, you need to run the following commands, in the same terminal you ran the `poetry install` command: +* Copy the `.env.example` file available in the `supabase/docker` directory to `.env` in `autogpt_platform`: + ``` + cp supabase/docker/.env.example .env + ``` + This command will copy the `.env.example` file to `.env` in the `supabase/docker` directory. You can modify the `.env` file to add your own environment variables. - ```sh - docker compose up postgres redis -d - poetry run prisma migrate dev - ``` -After deploying the migration, to ensure that the database schema is correctly mapped to your codebase, allowing the application to interact with the database properly, you need to generate the Prisma database model: +* Run the backend services: + ``` + docker compose up -d + ``` + This command will start all the necessary backend services defined in the `docker-compose.combined.yml` file in detached mode. -```bash -poetry run prisma generate -``` -Without running this command, the necessary Python modules (prisma.models) won't be available, leading to a `ModuleNotFoundError`. 
+## Running the frontend application -### Get access to Supabase +To run the frontend application, follow these steps: -Navigate to rnd/supabase -Run the following command: +* Navigate to `frontend` folder within the `autogpt_platform` directory: + ``` + cd frontend + ``` -```bash - git submodule update --init --recursive -``` -### Running the server +* Copy the `.env.example` file available in the `frontend` directory to `.env` in the same directory: + ``` + cp .env.example .env + ``` + You can modify the `.env` within this folder to add your own environment variables for the frontend application. -To run the server, navigate back to rnd (cd..) and run the following commands in the same terminal you ran the `poetry install` command: - -```bash -cp supabase/docker/.env.example .env -docker compose build -docker compose up -d -``` - -In the other terminal from frontend, you can run the following command to start the frontend: - -```bash -cp .env.example .env -yarn dev -``` +* Run the following command: + ``` + npm install + npm run dev + ``` + This command will install the necessary dependencies and start the frontend application in development mode. -### Checking if the server is running +## Checking if the application is running You can check if the server is running by visiting [http://localhost:3000](http://localhost:3000) in your browser. ### Notes: -By default the daemons for different services run on the following ports: +By default the application for different services run on the following ports: -Execution Manager Daemon: 8002 -Execution Scheduler Daemon: 8003 -Rest Server Daemon: 8004 +Frontend UI Server: 3000 +Backend Websocket Server: 8001 +Execution API Rest Server: 8006 From 81d1be73cd8ba746988cb19d95ac76f47aca87e8 Mon Sep 17 00:00:00 2001 From: Zamil Majdy Date: Tue, 24 Sep 2024 18:11:15 -0500 Subject: [PATCH 09/12] feat(platform): Add OpenAI reasoning models (#8152) --- .../backend/backend/blocks/llm.py | 32 +++++++++++++++---- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/autogpt_platform/backend/backend/blocks/llm.py b/autogpt_platform/backend/backend/blocks/llm.py index e2bdfd8c2485..ead18b2dbf7e 100644 --- a/autogpt_platform/backend/backend/blocks/llm.py +++ b/autogpt_platform/backend/backend/blocks/llm.py @@ -30,6 +30,8 @@ class ModelMetadata(NamedTuple): class LlmModel(str, Enum): # OpenAI models + O1_PREVIEW = "o1-preview" + O1_MINI = "o1-mini" GPT4O_MINI = "gpt-4o-mini" GPT4O = "gpt-4o" GPT4_TURBO = "gpt-4-turbo" @@ -57,6 +59,8 @@ def metadata(self) -> ModelMetadata: MODEL_METADATA = { + LlmModel.O1_PREVIEW: ModelMetadata("openai", 32000, cost_factor=60), + LlmModel.O1_MINI: ModelMetadata("openai", 62000, cost_factor=30), LlmModel.GPT4O_MINI: ModelMetadata("openai", 128000, cost_factor=10), LlmModel.GPT4O: ModelMetadata("openai", 128000, cost_factor=12), LlmModel.GPT4_TURBO: ModelMetadata("openai", 128000, cost_factor=11), @@ -84,7 +88,10 @@ def metadata(self) -> ModelMetadata: class AIStructuredResponseGeneratorBlock(Block): class Input(BlockSchema): prompt: str - expected_format: dict[str, str] + expected_format: dict[str, str] = SchemaField( + description="Expected format of the response. If provided, the response will be validated against this format. 
" + "The keys should be the expected fields in the response, and the values should be the description of the field.", + ) model: LlmModel = LlmModel.GPT4_TURBO api_key: BlockSecret = SecretField(value="") sys_prompt: str = "" @@ -132,7 +139,18 @@ def llm_call( if provider == "openai": openai.api_key = api_key - response_format = {"type": "json_object"} if json_format else None + response_format = None + + if model in [LlmModel.O1_MINI, LlmModel.O1_PREVIEW]: + sys_messages = [p["content"] for p in prompt if p["role"] == "system"] + usr_messages = [p["content"] for p in prompt if p["role"] != "system"] + prompt = [ + {"role": "user", "content": "\n".join(sys_messages)}, + {"role": "user", "content": "\n".join(usr_messages)}, + ] + elif json_format: + response_format = {"type": "json_object"} + response = openai.chat.completions.create( model=model.value, messages=prompt, # type: ignore @@ -207,11 +225,11 @@ def trim_prompt(s: str) -> str: format_prompt = ",\n ".join(expected_format) sys_prompt = trim_prompt( f""" - |Reply in json format: - |{{ - | {format_prompt} - |}} - """ + |Reply strictly only in the following JSON format: + |{{ + | {format_prompt} + |}} + """ ) prompt.append({"role": "system", "content": sys_prompt}) From b78c43111f72a779aeb4b26d68f4e5d9b04ea91b Mon Sep 17 00:00:00 2001 From: Zamil Majdy Date: Tue, 24 Sep 2024 18:15:45 -0500 Subject: [PATCH 10/12] feat(platform): Make REST & WS server host configurable (#8143) --- autogpt_platform/backend/backend/server/rest_api.py | 7 ++++++- autogpt_platform/backend/backend/server/ws_api.py | 6 +++++- autogpt_platform/backend/backend/util/settings.py | 10 ++++++++++ 3 files changed, 21 insertions(+), 2 deletions(-) diff --git a/autogpt_platform/backend/backend/server/rest_api.py b/autogpt_platform/backend/backend/server/rest_api.py index 9f3afd2fba68..9ddaa5bdc031 100644 --- a/autogpt_platform/backend/backend/server/rest_api.py +++ b/autogpt_platform/backend/backend/server/rest_api.py @@ -251,7 +251,12 @@ def run_service(self): app.include_router(api_router) - uvicorn.run(app, host="0.0.0.0", port=Config().agent_api_port, log_config=None) + uvicorn.run( + app, + host=Config().agent_api_host, + port=Config().agent_api_port, + log_config=None, + ) def set_test_dependency_overrides(self, overrides: dict): self._test_dependency_overrides = overrides diff --git a/autogpt_platform/backend/backend/server/ws_api.py b/autogpt_platform/backend/backend/server/ws_api.py index cbfe00e439d9..0bf8231eeb33 100644 --- a/autogpt_platform/backend/backend/server/ws_api.py +++ b/autogpt_platform/backend/backend/server/ws_api.py @@ -174,4 +174,8 @@ async def websocket_router( class WebsocketServer(AppProcess): def run(self): - uvicorn.run(app, host="0.0.0.0", port=Config().websocket_server_port) + uvicorn.run( + app, + host=Config().websocket_server_host, + port=Config().websocket_server_port, + ) diff --git a/autogpt_platform/backend/backend/util/settings.py b/autogpt_platform/backend/backend/util/settings.py index 230fa14f5282..c81ac09bd8ec 100644 --- a/autogpt_platform/backend/backend/util/settings.py +++ b/autogpt_platform/backend/backend/util/settings.py @@ -80,6 +80,11 @@ class Config(UpdateTrackingModel["Config"], BaseSettings): extra="allow", ) + websocket_server_host: str = Field( + default="0.0.0.0", + description="The host for the websocket server to run on", + ) + websocket_server_port: int = Field( default=8001, description="The port for the websocket server to run on", @@ -100,6 +105,11 @@ class Config(UpdateTrackingModel["Config"], 
BaseSettings): description="The port for agent server daemon to run on", ) + agent_api_host: str = Field( + default="0.0.0.0", + description="The host for agent server API to run on", + ) + agent_api_port: int = Field( default=8006, description="The port for agent server API to run on", From 03b8f5ec6ed986c7dcb57913ceaf1879de9d558e Mon Sep 17 00:00:00 2001 From: Swifty Date: Wed, 25 Sep 2024 01:19:50 +0200 Subject: [PATCH 11/12] feat(marketplace): Added a list of keywords to describe agents (#8146) * Added more keywords * formatting --- .../src/app/marketplace/submit/page.tsx | 54 ++++++++++++++++--- 1 file changed, 48 insertions(+), 6 deletions(-) diff --git a/autogpt_platform/frontend/src/app/marketplace/submit/page.tsx b/autogpt_platform/frontend/src/app/marketplace/submit/page.tsx index b6fdeae9a1db..a489e9087ba3 100644 --- a/autogpt_platform/frontend/src/app/marketplace/submit/page.tsx +++ b/autogpt_platform/frontend/src/app/marketplace/submit/page.tsx @@ -37,6 +37,49 @@ type FormData = { selectedAgentId: string; }; +const keywords = [ + "Automation", + "AI Workflows", + "Integration", + "Task Automation", + "Data Processing", + "Workflow Management", + "Real-time Analytics", + "Custom Triggers", + "Event-driven", + "API Integration", + "Data Transformation", + "Multi-step Workflows", + "Collaboration Tools", + "Business Process Automation", + "No-code Solutions", + "AI-Powered", + "Smart Notifications", + "Data Syncing", + "User Engagement", + "Reporting Automation", + "Lead Generation", + "Customer Support Automation", + "E-commerce Automation", + "Social Media Management", + "Email Marketing Automation", + "Document Management", + "Data Enrichment", + "Performance Tracking", + "Predictive Analytics", + "Resource Allocation", + "Chatbot", + "Virtual Assistant", + "Workflow Automation", + "Social Media Manager", + "Email Optimizer", + "Content Generator", + "Data Analyzer", + "Task Scheduler", + "Customer Service Bot", + "Personalization Engine", +]; + const SubmitPage: React.FC = () => { const router = useRouter(); const { @@ -292,12 +335,11 @@ const SubmitPage: React.FC = () => { - - Keyword 1 - - - Keyword 2 - + {keywords.map((keyword) => ( + + {keyword} + + ))} {/* Add more predefined keywords as needed */} From 46b8f9af0ad0c2522ef4354b33eb425389180aee Mon Sep 17 00:00:00 2001 From: Nicholas Tindle Date: Tue, 24 Sep 2024 18:31:38 -0500 Subject: [PATCH 12/12] feat(builder): scaffold playwright (#8109) --- .github/workflows/platform-frontend-ci.yml | 88 ++++++++++++++----- autogpt_platform/frontend/.gitignore | 5 ++ autogpt_platform/frontend/package.json | 6 +- .../frontend/playwright.config.ts | 81 +++++++++++++++++ .../frontend/src/tests/title.spec.ts | 8 ++ autogpt_platform/frontend/yarn.lock | 26 ++++++ 6 files changed, 190 insertions(+), 24 deletions(-) create mode 100644 autogpt_platform/frontend/playwright.config.ts create mode 100644 autogpt_platform/frontend/src/tests/title.spec.ts diff --git a/.github/workflows/platform-frontend-ci.yml b/.github/workflows/platform-frontend-ci.yml index cbd07ea6bcf0..72c3888c1d82 100644 --- a/.github/workflows/platform-frontend-ci.yml +++ b/.github/workflows/platform-frontend-ci.yml @@ -2,14 +2,14 @@ name: AutoGPT Platform - Frontend CI on: push: - branches: [ master ] + branches: [master] paths: - - '.github/workflows/platform-frontend-ci.yml' - - 'autogpt_platform/frontend/**' + - ".github/workflows/platform-frontend-ci.yml" + - "autogpt_platform/frontend/**" pull_request: paths: - - '.github/workflows/platform-frontend-ci.yml' - - 
'autogpt_platform/frontend/**' + - ".github/workflows/platform-frontend-ci.yml" + - "autogpt_platform/frontend/**" defaults: run: @@ -17,25 +17,67 @@ defaults: working-directory: autogpt_platform/frontend jobs: - lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - name: Set up Node.js - uses: actions/setup-node@v4 - with: - node-version: '21' - - - name: Install dependencies - run: | - npm install - - - name: Check formatting with Prettier - run: | - npx prettier --check . - - - name: Run lint - run: | - npm run lint + - uses: actions/checkout@v4 + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: "21" + + - name: Install dependencies + run: | + npm install + + - name: Check formatting with Prettier + run: | + npx prettier --check . + + - name: Run lint + run: | + npm run lint + + test: + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + submodules: recursive + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: "21" + + - name: Copy default supabase .env + run: | + cp ../supabase/docker/.env.example ../.env + + - name: Run docker compose + run: | + docker compose -f ../docker-compose.yml up -d + + - name: Install dependencies + run: | + npm install + + - name: Setup Builder .env + run: | + cp .env.example .env + + - name: Install Playwright Browsers + run: npx playwright install --with-deps + + - name: Run tests + run: | + npm run test + + - uses: actions/upload-artifact@v4 + if: ${{ !cancelled() }} + with: + name: playwright-report + path: playwright-report/ + retention-days: 30 diff --git a/autogpt_platform/frontend/.gitignore b/autogpt_platform/frontend/.gitignore index 1dd45b202245..cfe0cde0bb2d 100644 --- a/autogpt_platform/frontend/.gitignore +++ b/autogpt_platform/frontend/.gitignore @@ -37,3 +37,8 @@ next-env.d.ts # Sentry Config File .env.sentry-build-plugin +node_modules/ +/test-results/ +/playwright-report/ +/blob-report/ +/playwright/.cache/ diff --git a/autogpt_platform/frontend/package.json b/autogpt_platform/frontend/package.json index d37c0977b9b1..cc4c01649aa3 100644 --- a/autogpt_platform/frontend/package.json +++ b/autogpt_platform/frontend/package.json @@ -9,7 +9,10 @@ "build": "next build", "start": "next start", "lint": "next lint", - "format": "prettier --write ." + "format": "prettier --write .", + "test": "playwright test", + "test-ui": "playwright test --ui", + "gentests": "playwright codegen http://localhost:3000" }, "dependencies": { "@hookform/resolvers": "^3.9.0", @@ -59,6 +62,7 @@ "zod": "^3.23.8" }, "devDependencies": { + "@playwright/test": "^1.47.1", "@types/node": "^20", "@types/react": "^18", "@types/react-dom": "^18", diff --git a/autogpt_platform/frontend/playwright.config.ts b/autogpt_platform/frontend/playwright.config.ts new file mode 100644 index 000000000000..75c9f68d4c80 --- /dev/null +++ b/autogpt_platform/frontend/playwright.config.ts @@ -0,0 +1,81 @@ +import { defineConfig, devices } from "@playwright/test"; + +/** + * Read environment variables from file. + * https://github.com/motdotla/dotenv + */ +// import dotenv from 'dotenv'; +// import path from 'path'; +// dotenv.config({ path: path.resolve(__dirname, '.env') }); + +/** + * See https://playwright.dev/docs/test-configuration. + */ +export default defineConfig({ + testDir: "./src/tests", + /* Run tests in files in parallel */ + fullyParallel: true, + /* Fail the build on CI if you accidentally left test.only in the source code. 
*/ + forbidOnly: !!process.env.CI, + /* Retry on CI only */ + retries: process.env.CI ? 2 : 0, + /* Opt out of parallel tests on CI. */ + workers: process.env.CI ? 1 : undefined, + /* Reporter to use. See https://playwright.dev/docs/test-reporters */ + reporter: "html", + /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ + use: { + /* Base URL to use in actions like `await page.goto('/')`. */ + baseURL: "http://localhost:3000/", + + /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */ + trace: "on-first-retry", + bypassCSP: true, + }, + + /* Configure projects for major browsers */ + projects: [ + { + name: "chromium", + use: { ...devices["Desktop Chrome"] }, + }, + + { + name: "firefox", + use: { ...devices["Desktop Firefox"] }, + }, + + { + name: "webkit", + use: { ...devices["Desktop Safari"] }, + }, + + /* Test against mobile viewports. */ + // { + // name: 'Mobile Chrome', + // use: { ...devices['Pixel 5'] }, + // }, + // { + // name: 'Mobile Safari', + // use: { ...devices['iPhone 12'] }, + // }, + + /* Test against branded browsers. */ + { + name: "Microsoft Edge", + use: { ...devices["Desktop Edge"], channel: "msedge" }, + }, + // { + // name: 'Google Chrome', + // use: { ...devices['Desktop Chrome'], channel: 'chrome' }, + // }, + ], + + /* Run your local dev server before starting the tests */ + webServer: { + command: "npm run build && npm run start", + url: "http://localhost:3000/", + reuseExistingServer: !process.env.CI, + timeout: 120 * 1000, + }, +}); diff --git a/autogpt_platform/frontend/src/tests/title.spec.ts b/autogpt_platform/frontend/src/tests/title.spec.ts new file mode 100644 index 000000000000..e7e95c949bd3 --- /dev/null +++ b/autogpt_platform/frontend/src/tests/title.spec.ts @@ -0,0 +1,8 @@ +import { test, expect } from "@playwright/test"; + +test("has title", async ({ page }) => { + await page.goto("/"); + + // Expect a title "to contain" a substring. 
+ await expect(page).toHaveTitle(/NextGen AutoGPT/); +}); diff --git a/autogpt_platform/frontend/yarn.lock b/autogpt_platform/frontend/yarn.lock index 4efddaff7917..5318bb4280a7 100644 --- a/autogpt_platform/frontend/yarn.lock +++ b/autogpt_platform/frontend/yarn.lock @@ -655,6 +655,13 @@ resolved "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz" integrity sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg== +"@playwright/test@^1.47.1": + version "1.47.1" + resolved "https://registry.yarnpkg.com/@playwright/test/-/test-1.47.1.tgz#568a46229a5aef54b74977297a7946bb5ac4b67b" + integrity sha512-dbWpcNQZ5nj16m+A5UNScYx7HX5trIy7g4phrcitn+Nk83S32EBX/CLU4hiF4RGKX/yRc93AAqtfaXB7JWBd4Q== + dependencies: + playwright "1.47.1" + "@prisma/instrumentation@5.19.1": version "5.19.1" resolved "https://registry.yarnpkg.com/@prisma/instrumentation/-/instrumentation-5.19.1.tgz#146319cf85f22b7a43296f0f40cfeac55516e66e" @@ -3173,6 +3180,11 @@ fs.realpath@^1.0.0: resolved "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz" integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw== +fsevents@2.3.2: + version "2.3.2" + resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" + integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== + fsevents@~2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6" @@ -4604,6 +4616,20 @@ pirates@^4.0.1: resolved "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz" integrity sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg== +playwright-core@1.47.1: + version "1.47.1" + resolved "https://registry.yarnpkg.com/playwright-core/-/playwright-core-1.47.1.tgz#bb45bdfb0d48412c535501aa3805867282857df8" + integrity sha512-i1iyJdLftqtt51mEk6AhYFaAJCDx0xQ/O5NU8EKaWFgMjItPVma542Nh/Aq8aLCjIJSzjaiEQGW/nyqLkGF1OQ== + +playwright@1.47.1: + version "1.47.1" + resolved "https://registry.yarnpkg.com/playwright/-/playwright-1.47.1.tgz#cdc1116f5265b8d2ff7be0d8942d49900634dc6c" + integrity sha512-SUEKi6947IqYbKxRiqnbUobVZY4bF1uu+ZnZNJX9DfU1tlf2UhWfvVjLf01pQx9URsOr18bFVUKXmanYWhbfkw== + dependencies: + playwright-core "1.47.1" + optionalDependencies: + fsevents "2.3.2" + possible-typed-array-names@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz"