Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix(rnd): Added updated graph meta to include runs #8088

Merged
Show file tree
Hide file tree
Changes from 8 commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
21d8ab7
Added updated graph meta to include runs
Swiftyos Sep 18, 2024
b788534
Merge branch 'master' into swiftyos/secrt-871-monitor-page-crashing-o…
Swiftyos Sep 19, 2024
0967233
push logic server side
Swiftyos Sep 19, 2024
8ebf606
include runs for the monitor page load
Swiftyos Sep 19, 2024
0144b90
remove extra white space
Swiftyos Sep 19, 2024
e11a329
correct db query
Swiftyos Sep 19, 2024
419cdb9
fixed conversion
Swiftyos Sep 19, 2024
5c4f2e5
fix linting
Swiftyos Sep 19, 2024
cd4e20a
pr changes
Swiftyos Sep 19, 2024
f3fd0d0
fmt
Swiftyos Sep 19, 2024
59524ad
Merge branch 'master' of github.com:Significant-Gravitas/AutoGPT into…
majdyz Sep 19, 2024
c9d2c8d
Add flexible date getTime parsing
majdyz Sep 19, 2024
e800cb2
Merge remote-tracking branch 'origin/swiftyos/secrt-871-monitor-page-…
majdyz Sep 19, 2024
852eef9
disable sentry when running in dev
Swiftyos Sep 20, 2024
a6c6af9
Split out GraphMeta types to GraphMeta and GraphMetaWithRuns
Swiftyos Sep 20, 2024
678360c
cleared out build stuff in pyproject.toml
Swiftyos Sep 20, 2024
9434634
Update rnd/autogpt_builder/src/lib/autogpt-server-api/types.ts
Swiftyos Sep 20, 2024
27b3922
pr fixes
Swiftyos Sep 20, 2024
d912a96
Merge branch 'master' of github.com:Significant-Gravitas/AutoGPT into…
majdyz Sep 20, 2024
c8b602c
Merge branch 'swiftyos/secrt-871-monitor-page-crashing-on-due-to-numb…
majdyz Sep 20, 2024
7c41f85
lint fix
majdyz Sep 20, 2024
5c811a7
Revert added file
majdyz Sep 20, 2024
c388385
Merge branch 'master' into swiftyos/secrt-871-monitor-page-crashing-o…
Swiftyos Sep 23, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
126 changes: 37 additions & 89 deletions rnd/autogpt_builder/src/app/page.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ import React, { useCallback, useEffect, useMemo, useState } from "react";

import AutoGPTServerAPI, {
GraphMeta,
ExecutionMeta,
NodeExecutionResult,
} from "@/lib/autogpt-server-api";

Expand All @@ -24,55 +25,28 @@ const Monitor = () => {

const api = useMemo(() => new AutoGPTServerAPI(), []);

const refreshFlowRuns = useCallback(
(flowID: string) => {
// Fetch flow run IDs
api.listGraphRunIDs(flowID).then((runIDs) =>
runIDs.map((runID) => {
let run;
if (
(run = flowRuns.find((fr) => fr.id == runID)) &&
!["waiting", "running"].includes(run.status)
) {
return;
}

// Fetch flow run
api.getGraphExecutionInfo(flowID, runID).then((execInfo) =>
setFlowRuns((flowRuns) => {
if (execInfo.length == 0) return flowRuns;

const flowRunIndex = flowRuns.findIndex((fr) => fr.id == runID);
const flowRun = flowRunFromNodeExecutionResults(execInfo);
if (flowRunIndex > -1) {
flowRuns.splice(flowRunIndex, 1, flowRun);
} else {
flowRuns.push(flowRun);
}
return [...flowRuns];
}),
);
}),
const fetchAgents = useCallback(() => {
api.listGraphs(true).then((agent) => {
setFlows(agent);
const flowRuns = agent.flatMap((graph) =>
graph.executions != null
? graph.executions.map((execution) =>
flowRunFromExecutionMeta(graph, execution),
)
: [],
);
},
[api, flowRuns],
);

const fetchFlowsAndRuns = useCallback(() => {
api.listGraphs().then((flows) => {
setFlows(flows);
flows.map((flow) => refreshFlowRuns(flow.id));
setFlowRuns(flowRuns);
});
}, [api, refreshFlowRuns]);
}, [api]);

useEffect(() => fetchFlowsAndRuns(), [fetchFlowsAndRuns]);
useEffect(() => {
const intervalId = setInterval(
() => flows.map((f) => refreshFlowRuns(f.id)),
5000,
);
fetchAgents();
}, [api, fetchAgents]);

useEffect(() => {
const intervalId = setInterval(() => fetchAgents(), 5000);
return () => clearInterval(intervalId);
}, [flows, refreshFlowRuns]);
}, [fetchAgents, flows]);

const column1 = "md:col-span-2 xl:col-span-3 xxl:col-span-2";
const column2 = "md:col-span-3 lg:col-span-2 xl:col-span-3 space-y-4";
Expand Down Expand Up @@ -123,56 +97,30 @@ const Monitor = () => {
);
};

function flowRunFromNodeExecutionResults(
nodeExecutionResults: NodeExecutionResult[],
function flowRunFromExecutionMeta(
graphMeta: GraphMeta,
executionMeta: ExecutionMeta,
): FlowRun {
// Determine overall status
let status: "running" | "waiting" | "success" | "failed" = "success";
for (const execution of nodeExecutionResults) {
if (execution.status === "FAILED") {
status = "failed";
break;
} else if (["QUEUED", "RUNNING"].includes(execution.status)) {
status = "running";
break;
} else if (execution.status === "INCOMPLETE") {
status = "waiting";
}
if (executionMeta.status === "FAILED") {
status = "failed";
} else if (["QUEUED", "RUNNING"].includes(executionMeta.status)) {
status = "running";
} else if (executionMeta.status === "INCOMPLETE") {
status = "waiting";
}

// Determine aggregate startTime, endTime, and totalRunTime
const now = Date.now();
const startTime = Math.min(
...nodeExecutionResults.map((ner) => ner.add_time.getTime()),
now,
);
const endTime = ["success", "failed"].includes(status)
? Math.max(
...nodeExecutionResults.map((ner) => ner.end_time?.getTime() || 0),
startTime,
)
: now;
const duration = (endTime - startTime) / 1000; // Convert to seconds
const totalRunTime =
nodeExecutionResults.reduce(
(cum, node) =>
cum +
((node.end_time?.getTime() ?? now) -
(node.start_time?.getTime() ?? now)),
0,
) / 1000;

return {
id: nodeExecutionResults[0].graph_exec_id,
graphID: nodeExecutionResults[0].graph_id,
graphVersion: nodeExecutionResults[0].graph_version,
id: executionMeta.execution_id,
graphID: graphMeta.id,
graphVersion: graphMeta.version,
status,
startTime,
endTime,
duration,
totalRunTime,
nodeExecutionResults: nodeExecutionResults,
};
startTime: new Date(executionMeta.started_at).getTime(),
endTime: executionMeta.ended_at
? new Date(executionMeta.ended_at).getTime()
: undefined,
Swiftyos marked this conversation as resolved.
Show resolved Hide resolved
duration: executionMeta.duration,
totalRunTime: executionMeta.total_run_time,
} as FlowRun;
}

export default Monitor;
4 changes: 2 additions & 2 deletions rnd/autogpt_builder/src/lib/autogpt-server-api/baseClient.ts
Original file line number Diff line number Diff line change
Expand Up @@ -44,8 +44,8 @@ export default class BaseAutoGPTServerAPI {
return await this._get("/blocks");
}

/**
 * List metadata for all of the user's graphs.
 * @param with_runs - when true, the server includes each graph's execution
 *   history in the returned GraphMeta objects.
 */
async listGraphs(with_runs: boolean = false): Promise<GraphMeta[]> {
  return this._get(`/graphs?with_runs=${with_runs}`);
}

async listTemplates(): Promise<GraphMeta[]> {
Expand Down
11 changes: 11 additions & 0 deletions rnd/autogpt_builder/src/lib/autogpt-server-api/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -140,6 +140,16 @@ export type LinkCreatable = Omit<Link, "id" | "is_static"> & {
id?: string;
};

/* Mirror of autogpt_server/data/graph.py:ExecutionMeta */
export type ExecutionMeta = {
  // ID of the graph execution (server-side AgentGraphExecution.id).
  execution_id: string;
  // NOTE(review): over JSON this arrives as an ISO string, and consumers wrap
  // it in `new Date()` before use — consider typing as `string`; confirm.
  started_at: Date;
  // NOTE(review): consumers guard this with a truthiness check, suggesting it
  // can be absent for in-progress runs — consider `ended_at?:`; confirm.
  ended_at: Date;
  // Wall-clock seconds from start to end of the run.
  duration: number;
  // Sum of per-node execution durations, in seconds.
  total_run_time: number;
  status: "INCOMPLETE" | "QUEUED" | "RUNNING" | "COMPLETED" | "FAILED";
};

/* Mirror of autogpt_server/data/graph.py:GraphMeta */
export type GraphMeta = {
id: string;
Expand All @@ -148,6 +158,7 @@ export type GraphMeta = {
is_template: boolean;
name: string;
description: string;
executions: ExecutionMeta[];
};

/* Mirror of autogpt_server/data/graph.py:Graph */
Expand Down
70 changes: 63 additions & 7 deletions rnd/autogpt_server/autogpt_server/data/graph.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,17 @@
import asyncio
import logging
import uuid
from datetime import datetime, timezone
from pathlib import Path
from typing import Any, Literal

import prisma.types
from prisma.models import AgentGraph, AgentNode, AgentNodeLink
from prisma.models import AgentGraph, AgentGraphExecution, AgentNode, AgentNodeLink
from prisma.types import AgentGraphInclude
from pydantic import BaseModel, PrivateAttr
from pydantic_core import PydanticUndefinedType

import autogpt_server.data.execution
from autogpt_server.blocks.basic import AgentInputBlock, AgentOutputBlock
from autogpt_server.data.block import BlockInput, get_block, get_blocks
from autogpt_server.data.db import BaseDbModel, transaction
Expand Down Expand Up @@ -77,23 +80,67 @@ def from_db(node: AgentNode):
return obj


class ExecutionMeta(BaseDbModel):
    """Summary of a single graph execution, shown on the monitor page.

    Mirrored client-side in autogpt-server-api/types.ts:ExecutionMeta.
    """

    execution_id: str
    started_at: datetime
    ended_at: datetime
    # Wall-clock seconds between the start and end of the run.
    duration: float
    # Sum of each node's own execution time, in seconds.
    total_run_time: float
    status: autogpt_server.data.execution.ExecutionStatus

    @staticmethod
    def from_agent_graph_execution(execution: AgentGraphExecution):
        """Build an ExecutionMeta from a prisma AgentGraphExecution row."""
        now = datetime.now(timezone.utc)
        # Fall back to the row's creation time when the run never started.
        begin = execution.startedAt or execution.createdAt
        # NOTE(review): updatedAt is used as a proxy for completion time —
        # confirm it tracks the actual end of the run.
        finish = execution.updatedAt or now

        # Nodes with missing timestamps contribute ~0 (now - now) seconds.
        node_executions = execution.AgentNodeExecutions or []
        total_run_time = sum(
            ((ne.endedTime or now) - (ne.startedTime or now)).total_seconds()
            for ne in node_executions
        )

        return ExecutionMeta(
            id=execution.id,
            execution_id=execution.id,
            started_at=begin,
            ended_at=finish,
            duration=(finish - begin).total_seconds(),
            total_run_time=total_run_time,
            status=autogpt_server.data.execution.ExecutionStatus(
                execution.executionStatus
            ),
        )


class GraphMeta(BaseDbModel):
    """Lightweight view of an agent graph, optionally carrying its run history.

    Mirrored client-side in autogpt-server-api/types.ts:GraphMeta.
    """

    version: int = 1
    is_active: bool = True
    is_template: bool = False

    name: str
    description: str
    # Populated only when executions were included in the query
    # (see get_graphs_meta(include_executions=True)).
    executions: list[ExecutionMeta] = []

    @staticmethod
    def from_db(graph: AgentGraph):
        """Convert a prisma AgentGraph row (with optional executions) to GraphMeta."""
        runs = graph.AgentGraphExecution
        executions = (
            [ExecutionMeta.from_agent_graph_execution(run) for run in runs]
            if runs
            else []
        )

        return GraphMeta(
            id=graph.id,
            version=graph.version,
            is_active=graph.isActive,
            is_template=graph.isTemplate,
            name=graph.name or "",
            description=graph.description or "",
            executions=executions,
        )


Expand Down Expand Up @@ -338,6 +385,7 @@ async def get_node(node_id: str) -> Node:


async def get_graphs_meta(
include_executions: bool = False,
Swiftyos marked this conversation as resolved.
Show resolved Hide resolved
filter_by: Literal["active", "template"] | None = "active",
user_id: str | None = None,
) -> list[GraphMeta]:
Expand All @@ -361,11 +409,19 @@ async def get_graphs_meta(
if user_id and filter_by != "template":
where_clause["userId"] = user_id

graphs = await AgentGraph.prisma().find_many(
where=where_clause,
distinct=["id"],
order={"version": "desc"},
)
if include_executions:
graphs = await AgentGraph.prisma().find_many(
where=where_clause,
distinct=["id"],
order={"version": "desc"},
include=AgentGraphInclude(AgentGraphExecution={"include": {"AgentNodeExecutions": True}}), # type: ignore
)
else:
graphs = await AgentGraph.prisma().find_many(
where=where_clause,
distinct=["id"],
order={"version": "desc"},
)
Swiftyos marked this conversation as resolved.
Show resolved Hide resolved

if not graphs:
return []
Expand Down
8 changes: 6 additions & 2 deletions rnd/autogpt_server/autogpt_server/server/rest_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -326,9 +326,13 @@ def execute_graph_block(

@classmethod
async def get_graphs(
    cls,
    user_id: Annotated[str, Depends(get_user_id)],
    with_runs: bool = False,
) -> list[graph_db.GraphMeta]:
    """Return the current user's active graphs.

    Args:
        user_id: Injected by the auth dependency.
        with_runs: When True, each GraphMeta also carries its execution history.
    """
    return await graph_db.get_graphs_meta(
        include_executions=with_runs, filter_by="active", user_id=user_id
    )

@classmethod
async def get_templates(cls) -> list[graph_db.GraphMeta]:
Expand Down
Loading