Skip to content

Commit

Permalink
Fix type hinting
Browse files Browse the repository at this point in the history
  • Loading branch information
majdyz committed Oct 25, 2024
1 parent 0cd221f commit cca81c4
Show file tree
Hide file tree
Showing 4 changed files with 21 additions and 20 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
from redis import Redis
from backend.executor.database import DatabaseManager

from autogpt_libs.utils.cache import thread_cached_property
from autogpt_libs.utils.cache import thread_cached
from autogpt_libs.utils.synchronize import RedisKeyedMutex

from .types import (
Expand All @@ -21,8 +21,9 @@
class SupabaseIntegrationCredentialsStore:
def __init__(self, redis: "Redis"):
    # Per-key mutexes backed by the shared Redis connection, used to
    # serialize concurrent access to individual credential entries.
    self.locks = RedisKeyedMutex(redis)

@thread_cached_property

@property
@thread_cached
def db_manager(self) -> "DatabaseManager":
from backend.executor.database import DatabaseManager
from backend.util.service import get_service_client
Expand Down
9 changes: 1 addition & 8 deletions autogpt_platform/autogpt_libs/autogpt_libs/utils/cache.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,13 @@
from typing import Callable, TypeVar, ParamSpec
import threading
from functools import wraps
from typing import Callable, ParamSpec, TypeVar

T = TypeVar("T")
P = ParamSpec("P")
R = TypeVar("R")


def thread_cached(func: Callable[P, R]) -> Callable[P, R]:
thread_local = threading.local()

@wraps(func)
def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
cache = getattr(thread_local, "cache", None)
if cache is None:
Expand All @@ -21,7 +18,3 @@ def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
return cache[key]

return wrapper


def thread_cached_property(func: Callable[[T], R]) -> property:
    """Expose *func* as a read-only property cached once per thread.

    The getter is wrapped with ``thread_cached`` so each thread computes
    the value at most once; the result is then surfaced via ``property``.
    """
    per_thread_getter = thread_cached(func)
    return property(per_thread_getter)
5 changes: 3 additions & 2 deletions autogpt_platform/backend/backend/executor/scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.cron import CronTrigger
from autogpt_libs.utils.cache import thread_cached_property
from autogpt_libs.utils.cache import thread_cached

from backend.data.block import BlockInput
from backend.data.schedule import (
Expand Down Expand Up @@ -37,7 +37,8 @@ def __init__(self, refresh_interval=10):
def get_port(cls) -> int:
    """Return the configured port for the execution-scheduler service."""
    return Config().execution_scheduler_port

@thread_cached_property
@property
@thread_cached
def execution_client(self) -> ExecutionManager:
    # Lazily created and cached once per thread (@thread_cached), so
    # each thread reuses its own ExecutionManager service client.
    return get_service_client(ExecutionManager)

Expand Down
20 changes: 13 additions & 7 deletions autogpt_platform/backend/backend/server/rest_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

import uvicorn
from autogpt_libs.auth.middleware import auth_middleware
from autogpt_libs.utils.cache import thread_cached_property
from autogpt_libs.utils.cache import thread_cached
from fastapi import APIRouter, Body, Depends, FastAPI, HTTPException, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse
Expand Down Expand Up @@ -308,11 +308,13 @@ async def wrapper(*args, **kwargs):

return wrapper

@thread_cached_property
@property
@thread_cached
def execution_manager_client(self) -> ExecutionManager:
    # Per-thread cached client for the ExecutionManager service.
    return get_service_client(ExecutionManager)

@thread_cached_property
@property
@thread_cached
def execution_scheduler_client(self) -> ExecutionScheduler:
    # Per-thread cached client for the ExecutionScheduler service.
    return get_service_client(ExecutionScheduler)

Expand Down Expand Up @@ -541,7 +543,7 @@ async def stop_graph_run(
)

await asyncio.to_thread(
self.execution_manager_client.cancel_execution(graph_exec_id)
lambda: self.execution_manager_client.cancel_execution(graph_exec_id)
)

# Retrieve & return canceled graph execution in its final state
Expand Down Expand Up @@ -617,11 +619,15 @@ async def create_schedule(
graph = await graph_db.get_graph(graph_id, user_id=user_id)
if not graph:
raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.")
execution_scheduler = self.execution_scheduler_client

return {
"id": await asyncio.to_thread(
execution_scheduler.add_execution_schedule(
graph_id, graph.version, cron, input_data, user_id=user_id
lambda: self.execution_scheduler_client.add_execution_schedule(
graph_id=graph_id,
graph_version=graph.version,
cron=cron,
input_data=input_data,
user_id=user_id,
)
)
}
Expand Down

0 comments on commit cca81c4

Please sign in to comment.