Merge branch 'dev' into zamilmajdy/add-token-count-as-stats

majdyz authored Oct 22, 2024
2 parents 16faf01 + 0c51721 commit f845a7e
Showing 30 changed files with 447 additions and 77 deletions.
152 changes: 152 additions & 0 deletions .github/workflows/platform-autogpt-deploy.yaml
@@ -0,0 +1,152 @@
name: AutoGPT Platform - Build, Push, and Deploy Dev Environment

on:
  push:
    branches: [ dev ]
    paths:
      - 'autogpt_platform/backend/**'
      - 'autogpt_platform/frontend/**'
      - 'autogpt_platform/market/**'

permissions:
  contents: 'read'
  id-token: 'write'

env:
  PROJECT_ID: ${{ secrets.GCP_PROJECT_ID }}
  GKE_CLUSTER: dev-gke-cluster
  GKE_ZONE: us-central1-a
  NAMESPACE: dev-agpt

jobs:
  build-push-deploy:
    name: Build, Push, and Deploy
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v2
        with:
          fetch-depth: 0

      - id: 'auth'
        uses: 'google-github-actions/auth@v1'
        with:
          workload_identity_provider: 'projects/638488734936/locations/global/workloadIdentityPools/dev-pool/providers/github'
          service_account: '[email protected]'
          token_format: 'access_token'
          create_credentials_file: true

      - name: 'Set up Cloud SDK'
        uses: 'google-github-actions/setup-gcloud@v1'

      - name: 'Configure Docker'
        run: |
          gcloud auth configure-docker us-east1-docker.pkg.dev

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v1

      - name: Cache Docker layers
        uses: actions/cache@v2
        with:
          path: /tmp/.buildx-cache
          key: ${{ runner.os }}-buildx-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-buildx-

      - name: Check for changes
        id: check_changes
        run: |
          git fetch origin dev
          BACKEND_CHANGED=$(git diff --name-only origin/dev HEAD | grep -q "^autogpt_platform/backend/" && echo "true" || echo "false")
          FRONTEND_CHANGED=$(git diff --name-only origin/dev HEAD | grep -q "^autogpt_platform/frontend/" && echo "true" || echo "false")
          MARKET_CHANGED=$(git diff --name-only origin/dev HEAD | grep -q "^autogpt_platform/market/" && echo "true" || echo "false")
echo "backend_changed=$BACKEND_CHANGED" >> $GITHUB_OUTPUT
echo "frontend_changed=$FRONTEND_CHANGED" >> $GITHUB_OUTPUT
echo "market_changed=$MARKET_CHANGED" >> $GITHUB_OUTPUT
- name: Get GKE credentials
uses: 'google-github-actions/get-gke-credentials@v1'
with:
cluster_name: ${{ env.GKE_CLUSTER }}
location: ${{ env.GKE_ZONE }}

- name: Build and Push Backend
if: steps.check_changes.outputs.backend_changed == 'true'
uses: docker/build-push-action@v2
with:
context: .
file: ./autogpt_platform/backend/Dockerfile
push: true
tags: us-east1-docker.pkg.dev/agpt-dev/agpt-backend-dev/agpt-backend-dev:${{ github.sha }}
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max

- name: Build and Push Frontend
if: steps.check_changes.outputs.frontend_changed == 'true'
uses: docker/build-push-action@v2
with:
context: .
file: ./autogpt_platform/frontend/Dockerfile
push: true
tags: us-east1-docker.pkg.dev/agpt-dev/agpt-frontend-dev/agpt-frontend-dev:${{ github.sha }}
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max

- name: Build and Push Market
if: steps.check_changes.outputs.market_changed == 'true'
uses: docker/build-push-action@v2
with:
context: .
file: ./autogpt_platform/market/Dockerfile
push: true
tags: us-east1-docker.pkg.dev/agpt-dev/agpt-market-dev/agpt-market-dev:${{ github.sha }}
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new,mode=max

- name: Move cache
run: |
rm -rf /tmp/.buildx-cache
mv /tmp/.buildx-cache-new /tmp/.buildx-cache
- name: Set up Helm
uses: azure/setup-helm@v1
with:
version: v3.4.0

- name: Deploy Backend
if: steps.check_changes.outputs.backend_changed == 'true'
run: |
helm upgrade autogpt-server ./autogpt-server \
--namespace ${{ env.NAMESPACE }} \
-f autogpt-server/values.yaml \
-f autogpt-server/values.dev.yaml \
--set image.tag=${{ github.sha }}
- name: Deploy Websocket
if: steps.check_changes.outputs.backend_changed == 'true'
run: |
helm upgrade autogpt-websocket-server ./autogpt-websocket-server \
--namespace ${{ env.NAMESPACE }} \
-f autogpt-websocket-server/values.yaml \
-f autogpt-websocket-server/values.dev.yaml \
--set image.tag=${{ github.sha }}
- name: Deploy Market
if: steps.check_changes.outputs.market_changed == 'true'
run: |
helm upgrade autogpt-market ./autogpt-market \
--namespace ${{ env.NAMESPACE }} \
-f autogpt-market/values.yaml \
-f autogpt-market/values.dev.yaml \
--set image.tag=${{ github.sha }}
- name: Deploy Frontend
if: steps.check_changes.outputs.frontend_changed == 'true'
run: |
helm upgrade autogpt-builder ./autogpt-builder \
--namespace ${{ env.NAMESPACE }} \
-f autogpt-builder/values.yaml \
-f autogpt-builder/values.dev.yaml \
--set image.tag=${{ github.sha }}
17 changes: 17 additions & 0 deletions .github/workflows/platform-frontend-ci.yml
@@ -39,10 +39,27 @@ jobs:
     runs-on: ubuntu-latest

     steps:
+      - name: Free Disk Space (Ubuntu)
+        uses: jlumbroso/free-disk-space@main
+        with:
+          # this might remove tools that are actually needed,
+          # but if set to "true" it frees about 6 GB
+          tool-cache: false
+
+          # all of these default to true, but feel free to set them to
+          # "false" if necessary for your workflow
+          android: false
+          dotnet: false
+          haskell: false
+          large-packages: true
+          docker-images: true
+          swap-storage: true
+
       - name: Checkout repository
         uses: actions/checkout@v4
         with:
           submodules: recursive

       - name: Set up Node.js
         uses: actions/setup-node@v4
         with:
@@ -1,10 +1,10 @@
 import secrets
 from datetime import datetime, timedelta, timezone
-from typing import TYPE_CHECKING, cast
+from typing import TYPE_CHECKING

 if TYPE_CHECKING:
     from redis import Redis
-    from supabase import Client
+    from backend.executor.database import DatabaseManager

 from autogpt_libs.utils.synchronize import RedisKeyedMutex

@@ -18,8 +18,8 @@


 class SupabaseIntegrationCredentialsStore:
-    def __init__(self, supabase: "Client", redis: "Redis"):
-        self.supabase = supabase
+    def __init__(self, redis: "Redis", db: "DatabaseManager"):
+        self.db_manager: DatabaseManager = db
         self.locks = RedisKeyedMutex(redis)

     def add_creds(self, user_id: str, credentials: Credentials) -> None:
@@ -35,7 +35,9 @@ def add_creds(self, user_id: str, credentials: Credentials) -> None:

     def get_all_creds(self, user_id: str) -> list[Credentials]:
         user_metadata = self._get_user_metadata(user_id)
-        return UserMetadata.model_validate(user_metadata).integration_credentials
+        return UserMetadata.model_validate(
+            user_metadata.model_dump()
+        ).integration_credentials

     def get_creds_by_id(self, user_id: str, credentials_id: str) -> Credentials | None:
         all_credentials = self.get_all_creds(user_id)
@@ -90,9 +92,7 @@ def delete_creds_by_id(self, user_id: str, credentials_id: str) -> None:
         ]
         self._set_user_integration_creds(user_id, filtered_credentials)

-    async def store_state_token(
-        self, user_id: str, provider: str, scopes: list[str]
-    ) -> str:
+    def store_state_token(self, user_id: str, provider: str, scopes: list[str]) -> str:
         token = secrets.token_urlsafe(32)
         expires_at = datetime.now(timezone.utc) + timedelta(minutes=10)

@@ -105,17 +105,17 @@ async def store_state_token(

         with self.locked_user_metadata(user_id):
             user_metadata = self._get_user_metadata(user_id)
-            oauth_states = user_metadata.get("integration_oauth_states", [])
+            oauth_states = user_metadata.integration_oauth_states
             oauth_states.append(state.model_dump())
-            user_metadata["integration_oauth_states"] = oauth_states
+            user_metadata.integration_oauth_states = oauth_states

-            self.supabase.auth.admin.update_user_by_id(
-                user_id, {"user_metadata": user_metadata}
+            self.db_manager.update_user_metadata(
+                user_id=user_id, metadata=user_metadata
             )

         return token

-    async def get_any_valid_scopes_from_state_token(
+    def get_any_valid_scopes_from_state_token(
         self, user_id: str, token: str, provider: str
     ) -> list[str]:
         """
@@ -126,7 +126,7 @@ async def get_any_valid_scopes_from_state_token(
         THE CODE FOR TOKENS.
         """
         user_metadata = self._get_user_metadata(user_id)
-        oauth_states = user_metadata.get("integration_oauth_states", [])
+        oauth_states = user_metadata.integration_oauth_states

         now = datetime.now(timezone.utc)
         valid_state = next(
@@ -145,10 +145,10 @@ async def get_any_valid_scopes_from_state_token(

         return []

-    async def verify_state_token(self, user_id: str, token: str, provider: str) -> bool:
+    def verify_state_token(self, user_id: str, token: str, provider: str) -> bool:
         with self.locked_user_metadata(user_id):
             user_metadata = self._get_user_metadata(user_id)
-            oauth_states = user_metadata.get("integration_oauth_states", [])
+            oauth_states = user_metadata.integration_oauth_states

             now = datetime.now(timezone.utc)
             valid_state = next(
@@ -165,10 +165,8 @@ async def verify_state_token(self, user_id: str, token: str, provider: str) -> bool:
             if valid_state:
                 # Remove the used state
                 oauth_states.remove(valid_state)
-                user_metadata["integration_oauth_states"] = oauth_states
-                self.supabase.auth.admin.update_user_by_id(
-                    user_id, {"user_metadata": user_metadata}
-                )
+                user_metadata.integration_oauth_states = oauth_states
+                self.db_manager.update_user_metadata(user_id, user_metadata)
                 return True

             return False
@@ -177,19 +175,13 @@ def _set_user_integration_creds(
         self, user_id: str, credentials: list[Credentials]
     ) -> None:
         raw_metadata = self._get_user_metadata(user_id)
-        raw_metadata.update(
-            {"integration_credentials": [c.model_dump() for c in credentials]}
-        )
-        self.supabase.auth.admin.update_user_by_id(
-            user_id, {"user_metadata": raw_metadata}
-        )
+        raw_metadata.integration_credentials = [c.model_dump() for c in credentials]
+        self.db_manager.update_user_metadata(user_id, raw_metadata)

     def _get_user_metadata(self, user_id: str) -> UserMetadataRaw:
-        response = self.supabase.auth.admin.get_user_by_id(user_id)
-        if not response.user:
-            raise ValueError(f"User with ID {user_id} not found")
-        return cast(UserMetadataRaw, response.user.user_metadata)
+        metadata: UserMetadataRaw = self.db_manager.get_user_metadata(user_id=user_id)
+        return metadata

     def locked_user_metadata(self, user_id: str):
-        key = (self.supabase.supabase_url, f"user:{user_id}", "metadata")
+        key = (self.db_manager, f"user:{user_id}", "metadata")
         return self.locks.locked(key)
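
The refactor above swaps direct Supabase admin-API calls (self.supabase.auth.admin.*) for a DatabaseManager and makes the OAuth state-token helpers synchronous. A minimal sketch of how the store might be wired after this change; the connection settings, store module path, and DatabaseManager construction are assumptions for illustration, not taken from this commit:

```python
# Hypothetical wiring of the refactored store; flagged names are assumed.
from redis import Redis

from backend.executor.database import DatabaseManager  # path as imported in the diff
from autogpt_libs.supabase_integration_credentials_store.store import (  # assumed module path
    SupabaseIntegrationCredentialsStore,
)

redis = Redis(host="localhost", port=6379)  # placeholder connection settings
db_manager = DatabaseManager()  # assumed: real construction may take config/clients

store = SupabaseIntegrationCredentialsStore(redis=redis, db=db_manager)

# The state-token helpers are plain methods now, so no event loop is needed:
token = store.store_state_token("user-123", "github", scopes=["repo"])
assert store.verify_state_token("user-123", token, "github")  # consumes the state
```

Dropping async fits the new call pattern: every helper now performs blocking Redis and database calls under a per-user lock, so callers can mint and verify OAuth state tokens without awaiting.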
@@ -56,6 +56,7 @@ class OAuthState(BaseModel):
     token: str
     provider: str
     expires_at: int
+    scopes: list[str]
     """Unix timestamp (seconds) indicating when this OAuth state expires"""


@@ -64,6 +65,6 @@ class UserMetadata(BaseModel):
     integration_oauth_states: list[OAuthState] = Field(default_factory=list)


-class UserMetadataRaw(TypedDict, total=False):
-    integration_credentials: list[dict]
-    integration_oauth_states: list[dict]
+class UserMetadataRaw(BaseModel):
+    integration_credentials: list[dict] = Field(default_factory=list)
+    integration_oauth_states: list[dict] = Field(default_factory=list)
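
Turning UserMetadataRaw from a TypedDict into a Pydantic BaseModel is what drives the store changes above: dict-style user_metadata.get(...) becomes attribute access, defaults come from default_factory, and get_all_creds round-trips through model_dump(). A small self-contained sketch of that difference (field values are illustrative):

```python
# Minimal sketch of the TypedDict -> BaseModel effect; values are made up.
from pydantic import BaseModel, Field


class UserMetadataRaw(BaseModel):
    integration_credentials: list[dict] = Field(default_factory=list)
    integration_oauth_states: list[dict] = Field(default_factory=list)


raw = UserMetadataRaw()

# Attribute access with a guaranteed default replaces the TypedDict-era
# raw.get("integration_oauth_states", []).
raw.integration_oauth_states.append({"token": "abc", "provider": "github"})

# model_dump() yields a plain dict again, which is why get_all_creds() now
# validates UserMetadata against user_metadata.model_dump().
assert raw.model_dump()["integration_oauth_states"][0]["provider"] == "github"
```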
2 changes: 1 addition & 1 deletion autogpt_platform/backend/Dockerfile
@@ -8,7 +8,7 @@ WORKDIR /app

 # Install build dependencies
 RUN apt-get update \
-    && apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev gettext libz-dev libssl-dev postgresql-client git \
+    && apt-get install -y build-essential curl ffmpeg wget libcurl4-gnutls-dev libexpat1-dev libpq5 gettext libz-dev libssl-dev postgresql-client git \
     && apt-get clean \
     && rm -rf /var/lib/apt/lists/*
4 changes: 2 additions & 2 deletions autogpt_platform/backend/README.advanced.md
@@ -37,7 +37,7 @@ We use Poetry to manage the dependencies. To set up the project, follow these steps:
 5. Generate the Prisma client

    ```sh
-   poetry run prisma generate --schema postgres/schema.prisma
+   poetry run prisma generate
    ```


@@ -61,7 +61,7 @@ We use Poetry to manage the dependencies. To set up the project, follow these steps:

    ```sh
    cd ../backend
-   prisma migrate dev --schema postgres/schema.prisma
+   prisma migrate deploy
    ```

 ## Running The Server
2 changes: 1 addition & 1 deletion autogpt_platform/backend/README.md
@@ -59,7 +59,7 @@ We use Poetry to manage the dependencies. To set up the project, follow these steps:

    ```sh
    docker compose up db redis -d
-   poetry run prisma migrate dev
+   poetry run prisma migrate deploy
    ```

 ## Running The Server
2 changes: 1 addition & 1 deletion autogpt_platform/backend/backend/blocks/llm.py
@@ -62,7 +62,7 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
     GPT4_TURBO = "gpt-4-turbo"
     GPT3_5_TURBO = "gpt-3.5-turbo"
     # Anthropic models
-    CLAUDE_3_5_SONNET = "claude-3-5-sonnet-20240620"
+    CLAUDE_3_5_SONNET = "claude-3-5-sonnet-latest"
     CLAUDE_3_HAIKU = "claude-3-haiku-20240307"
     # Groq models
     LLAMA3_8B = "llama3-8b-8192"

0 comments on commit f845a7e
