From d9226888b20d9f182cbe889fd342aae69eef7c8f Mon Sep 17 00:00:00 2001 From: Zamil Majdy Date: Fri, 21 Jun 2024 20:50:50 +0400 Subject: [PATCH 01/23] feat(rnd): Add node metadata on Agent Server Node object (#7237) --- rnd/autogpt_server/autogpt_server/data/graph.py | 3 +++ rnd/autogpt_server/schema.prisma | 3 +++ 2 files changed, 6 insertions(+) diff --git a/rnd/autogpt_server/autogpt_server/data/graph.py b/rnd/autogpt_server/autogpt_server/data/graph.py index 8d03e272b8d0..103da14d6d88 100644 --- a/rnd/autogpt_server/autogpt_server/data/graph.py +++ b/rnd/autogpt_server/autogpt_server/data/graph.py @@ -15,6 +15,7 @@ class Node(BaseDbModel): # TODO: Make it `dict[str, list[str]]`, output can be connected to multiple blocks. # Other option is to use an edge-list, but it will complicate the rest code. output_nodes: dict[str, str] = {} # dict[output_name, node_id] + metadata: dict[str, Any] = {} @staticmethod def from_db(node: AgentNode): @@ -27,6 +28,7 @@ def from_db(node: AgentNode): input_default=json.loads(node.constantInput), input_nodes={v.sinkName: v.agentNodeSourceId for v in node.Input or []}, output_nodes={v.sourceName: v.agentNodeSinkId for v in node.Output or []}, + metadata=json.loads(node.metadata), ) def connect(self, node: "Node", source_name: str, sink_name: str): @@ -133,6 +135,7 @@ async def create_graph(graph: Graph) -> Graph: "agentBlockId": node.block_id, "agentGraphId": graph.id, "constantInput": json.dumps(node.input_default), + "metadata": json.dumps(node.metadata), }) for node in graph.nodes ]) diff --git a/rnd/autogpt_server/schema.prisma b/rnd/autogpt_server/schema.prisma index 691683ece2ca..a4b37e083956 100644 --- a/rnd/autogpt_server/schema.prisma +++ b/rnd/autogpt_server/schema.prisma @@ -37,6 +37,9 @@ model AgentNode { // JSON serialized dict[str, str] containing predefined input values. constantInput String @default("{}") + // JSON serialized dict[str, str] containing the node metadata. + metadata String @default("{}") + ExecutionHistory AgentNodeExecution[] } From 9f1e5218578f3eae08f2a6211438b484a0aef3ba Mon Sep 17 00:00:00 2001 From: Zamil Majdy Date: Mon, 24 Jun 2024 06:41:02 +0400 Subject: [PATCH 02/23] feat(rnd): Add AutoGPT server scheduling service (#7226) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Background Agent execution should be able to be triggered in a recurring manner. This PR introduced an ExecutionScheduling service, a process responsible for managing the execution schedule and triggering its execution based on a predefined cron expression. ### Changes 🏗️ * Added `scheduler.py` / `ExecutionScheduler` implementation. * Added scheduler test. * Added `AgentExecutionSchedule` table and its logical model & prisma queries. 
* Moved `add_execution` from API server to `execution_manager` --- rnd/autogpt_server/autogpt_server/app.py | 2 +- .../autogpt_server/data/schedule.py | 89 ++++++++++++ .../autogpt_server/executor/manager.py | 90 +++++++++---- .../autogpt_server/executor/scheduler.py | 77 +++++++++-- .../autogpt_server/server/server.py | 107 +++++++-------- .../autogpt_server/util/service.py | 14 +- rnd/autogpt_server/poetry.lock | 127 +++++++++++++++++- rnd/autogpt_server/pyproject.toml | 6 + rnd/autogpt_server/schema.prisma | 20 ++- .../test/executor/test_manager.py | 30 ++--- .../test/executor/test_scheduler.py | 33 +++++ rnd/autogpt_server/test/util/test_service.py | 3 - 12 files changed, 474 insertions(+), 124 deletions(-) create mode 100644 rnd/autogpt_server/autogpt_server/data/schedule.py create mode 100644 rnd/autogpt_server/test/executor/test_scheduler.py diff --git a/rnd/autogpt_server/autogpt_server/app.py b/rnd/autogpt_server/autogpt_server/app.py index 0f1b02c00753..298852129648 100644 --- a/rnd/autogpt_server/autogpt_server/app.py +++ b/rnd/autogpt_server/autogpt_server/app.py @@ -22,8 +22,8 @@ def main(**kwargs): run_processes( [ PyroNameServer(), - ExecutionScheduler(), ExecutionManager(pool_size=5), + ExecutionScheduler(), AgentServer(), ], **kwargs diff --git a/rnd/autogpt_server/autogpt_server/data/schedule.py b/rnd/autogpt_server/autogpt_server/data/schedule.py new file mode 100644 index 000000000000..d657300b8a64 --- /dev/null +++ b/rnd/autogpt_server/autogpt_server/data/schedule.py @@ -0,0 +1,89 @@ +import json +from datetime import datetime +from typing import Optional, Any + +from prisma.models import AgentExecutionSchedule + +from autogpt_server.data.db import BaseDbModel + + +class ExecutionSchedule(BaseDbModel): + id: str + agent_id: str + schedule: str + is_enabled: bool + input_data: dict[str, Any] + last_updated: Optional[datetime] = None + + def __init__( + self, + is_enabled: Optional[bool] = None, + **kwargs + ): + if is_enabled is None: + is_enabled = True + super().__init__(is_enabled=is_enabled, **kwargs) + + @staticmethod + def from_db(schedule: AgentExecutionSchedule): + return ExecutionSchedule( + id=schedule.id, + agent_id=schedule.agentGraphId, + schedule=schedule.schedule, + is_enabled=schedule.isEnabled, + last_updated=schedule.lastUpdated.replace(tzinfo=None), + input_data=json.loads(schedule.inputData), + ) + + +async def get_active_schedules(last_fetch_time: datetime) -> list[ExecutionSchedule]: + query = AgentExecutionSchedule.prisma().find_many( + where={ + "isEnabled": True, + "lastUpdated": {"gt": last_fetch_time} + }, + order={"lastUpdated": "asc"} + ) + return [ + ExecutionSchedule.from_db(schedule) + for schedule in await query + ] + + +async def disable_schedule(schedule_id: str): + await AgentExecutionSchedule.prisma().update( + where={"id": schedule_id}, + data={"isEnabled": False} + ) + + +async def get_schedules(agent_id: str) -> list[ExecutionSchedule]: + query = AgentExecutionSchedule.prisma().find_many( + where={ + "isEnabled": True, + "agentGraphId": agent_id, + }, + ) + return [ + ExecutionSchedule.from_db(schedule) + for schedule in await query + ] + + +async def add_schedule(schedule: ExecutionSchedule): + await AgentExecutionSchedule.prisma().create( + data={ + "id": schedule.id, + "agentGraphId": schedule.agent_id, + "schedule": schedule.schedule, + "isEnabled": schedule.is_enabled, + "inputData": json.dumps(schedule.input_data), + } + ) + + +async def update_schedule(schedule_id: str, is_enabled: bool): + await 
AgentExecutionSchedule.prisma().update( + where={"id": schedule_id}, + data={"isEnabled": is_enabled} + ) diff --git a/rnd/autogpt_server/autogpt_server/executor/manager.py b/rnd/autogpt_server/autogpt_server/executor/manager.py index 7fd282ebeceb..5296f0c8635b 100644 --- a/rnd/autogpt_server/autogpt_server/executor/manager.py +++ b/rnd/autogpt_server/autogpt_server/executor/manager.py @@ -1,12 +1,12 @@ import asyncio import logging - +import uuid from concurrent.futures import ProcessPoolExecutor from typing import Optional, Any from autogpt_server.data import db from autogpt_server.data.block import Block, get_block -from autogpt_server.data.graph import Node, get_node, get_node_input +from autogpt_server.data.graph import Node, get_node, get_node_input, get_graph from autogpt_server.data.execution import ( Execution, ExecutionQueue, @@ -21,7 +21,7 @@ def get_log_prefix(run_id: str, exec_id: str, block_name: str = "-"): - return f"[Execution graph-{run_id}|node-{exec_id}|{block_name}]" + return f"[ExecutionManager] [graph-{run_id}|node-{exec_id}|{block_name}]" def execute_node(loop: asyncio.AbstractEventLoop, data: Execution) -> Execution | None: @@ -80,26 +80,38 @@ def execute_node(loop: asyncio.AbstractEventLoop, data: Execution) -> Execution return None next_node_input: dict[str, Any] = wait(get_node_input(next_node, run_id)) - next_node_block: Block | None = wait(get_block(next_node.block_id)) - if not next_node_block: - logger.error(f"{prefix} Error, next block {next_node.block_id} not found.") + is_valid, validation_resp = wait(validate_exec(next_node, next_node_input)) + if not is_valid: + logger.warning(f"{prefix} Skipped {next_node_id}: {validation_resp}") return None - if not set(next_node.input_nodes).issubset(next_node_input): - logger.warning( - f"{prefix} Skipped {next_node_id}-{next_node_block.name}, " - f"missing: {set(next_node.input_nodes) - set(next_node_input)}" - ) - return None + logger.warning(f"{prefix} Enqueue next node {next_node_id}-{validation_resp}") + return Execution(run_id=run_id, node_id=next_node_id, data=next_node_input) - if error := next_node_block.input_schema.validate_data(next_node_input): - logger.warning( - f"{prefix} Skipped {next_node_id}-{next_node_block.name}, {error}" - ) - return None - logger.warning(f"{prefix} Enqueue next node {next_node_id}-{next_node_block.name}") - return Execution(run_id=run_id, node_id=next_node_id, data=next_node_input) +async def validate_exec(node: Node, data: dict[str, Any]) -> tuple[bool, str]: + """ + Validate the input data for a node execution. + + Args: + node: The node to execute. + data: The input data for the node execution. + + Returns: + A tuple of a boolean indicating if the data is valid, and a message if not. + Return the executed block name if the data is valid. + """ + node_block: Block | None = await(get_block(node.block_id)) + if not node_block: + return False, f"Block for {node.block_id} not found." 
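+    # The two checks below mirror the validation previously inlined in
+    # execute_node: every wired input pin must have a value, and the values
+    # must satisfy the block's input schema.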
+ + if not set(node.input_nodes).issubset(data): + return False, f"Input data missing: {set(node.input_nodes) - set(data)}" + + if error := node_block.input_schema.validate_data(data): + return False, f"Input data doesn't match {node_block.name}: {error}" + + return True, node_block.name class Executor: @@ -138,7 +150,7 @@ def on_complete_execution(f: asyncio.Future[Execution | None]): execution = f.result() if execution: - return self.__add_execution(execution) + return self.add_node_execution(execution) return None @@ -155,14 +167,34 @@ def on_complete_execution(f: asyncio.Future[Execution | None]): future.add_done_callback(on_complete_execution) # type: ignore @expose - def add_execution(self, run_id: str, node_id: str, data: dict[str, Any]) -> str: - try: - execution = Execution(run_id=run_id, node_id=node_id, data=data) - self.__add_execution(execution) - return execution.id - except Exception as e: - raise Exception("Error adding execution ", e) - - def __add_execution(self, execution: Execution) -> Execution: + def add_execution(self, graph_id: str, data: dict[str, Any]) -> dict: + run_id = str(uuid.uuid4()) + + agent = self.run_and_wait(get_graph(graph_id)) + if not agent: + raise Exception(f"Agent #{graph_id} not found.") + + # Currently, there is no constraint on the number of root nodes in the graph. + for node in agent.starting_nodes: + valid, error = self.run_and_wait(validate_exec(node, data)) + if not valid: + raise Exception(error) + + executions = [] + for node in agent.starting_nodes: + exec_id = self.add_node_execution( + Execution(run_id=run_id, node_id=node.id, data=data) + ) + executions.append({ + "exec_id": exec_id, + "node_id": node.id, + }) + + return { + "run_id": run_id, + "executions": executions, + } + + def add_node_execution(self, execution: Execution) -> Execution: self.run_and_wait(enqueue_execution(execution)) return self.queue.add(execution) diff --git a/rnd/autogpt_server/autogpt_server/executor/scheduler.py b/rnd/autogpt_server/autogpt_server/executor/scheduler.py index b208e20e2acf..a82619af7d34 100644 --- a/rnd/autogpt_server/autogpt_server/executor/scheduler.py +++ b/rnd/autogpt_server/autogpt_server/executor/scheduler.py @@ -1,23 +1,82 @@ +import logging import time -from autogpt_server.util.service import AppService, expose +from apscheduler.schedulers.background import BackgroundScheduler +from apscheduler.triggers.cron import CronTrigger +from datetime import datetime + +from autogpt_server.data import schedule as model +from autogpt_server.util.service import AppService, expose, get_service_client +from autogpt_server.executor.manager import ExecutionManager + +logger = logging.getLogger(__name__) + + +def log(msg, **kwargs): + logger.warning("[ExecutionScheduler] " + msg, **kwargs) class ExecutionScheduler(AppService): + def __init__(self, refresh_interval=10): + self.last_check = datetime.min + self.refresh_interval = refresh_interval + + @property + def execution_manager_client(self): + return get_service_client(ExecutionManager) + def run_service(self): + scheduler = BackgroundScheduler() + scheduler.start() while True: - time.sleep(1) # This will be replaced with apscheduler executor. 
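+            # Sync DB-defined schedules into the in-process APScheduler
+            # instance, then sleep until the next polling cycle.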
+ self.__refresh_jobs_from_db(scheduler) + time.sleep(self.refresh_interval) + + def __refresh_jobs_from_db(self, scheduler: BackgroundScheduler): + schedules = self.run_and_wait(model.get_active_schedules(self.last_check)) + for schedule in schedules: + self.last_check = max(self.last_check, schedule.last_updated) + + if not schedule.is_enabled: + log(f"Removing recurring job {schedule.id}: {schedule.schedule}") + scheduler.remove_job(schedule.id) + continue + + log(f"Adding recurring job {schedule.id}: {schedule.schedule}") + scheduler.add_job( + self.__execute_agent, + CronTrigger.from_crontab(schedule.schedule), + id=schedule.id, + args=[schedule.agent_id, schedule.input_data], + replace_existing=True, + ) + + def __execute_agent(self, agent_id: str, input_data: dict): + try: + log(f"Executing recurring job for agent #{agent_id}") + execution_manager = self.execution_manager_client + execution_manager.add_execution(agent_id, input_data) + except Exception as e: + logger.error(f"Error executing agent {agent_id}: {e}") + + @expose + def update_schedule(self, schedule_id: str, is_enabled: bool) -> str: + self.run_and_wait(model.update_schedule(schedule_id, is_enabled)) + return schedule_id @expose def add_execution_schedule(self, agent_id: str, cron: str, input_data: dict) -> str: - print( - f"Adding execution schedule for agent {agent_id} with cron {cron} and " - f"input data {input_data}" + schedule = model.ExecutionSchedule( + agent_id=agent_id, + schedule=cron, + input_data=input_data, ) - return "dummy_schedule_id" + self.run_and_wait(model.add_schedule(schedule)) + return schedule.id @expose - def get_execution_schedules(self, agent_id: str) -> list[dict]: - print(f"Getting execution schedules for agent {agent_id}") - return [{"cron": "dummy_cron", "input_data": {"dummy_input": "dummy_value"}}] + def get_execution_schedules(self, agent_id: str) -> dict[str, str]: + query = model.get_schedules(agent_id) + schedules: list[model.ExecutionSchedule] = self.run_and_wait(query) + return {v.id: v.schedule for v in schedules} diff --git a/rnd/autogpt_server/autogpt_server/server/server.py b/rnd/autogpt_server/autogpt_server/server/server.py index a147244142e8..7e787cf5b575 100644 --- a/rnd/autogpt_server/autogpt_server/server/server.py +++ b/rnd/autogpt_server/autogpt_server/server/server.py @@ -34,66 +34,75 @@ def run(self): router = APIRouter() router.add_api_route( path="/blocks", - endpoint=AgentServer.get_agent_blocks, + endpoint=self.get_agent_blocks, methods=["GET"], ) router.add_api_route( path="/agents", - endpoint=AgentServer.get_agents, + endpoint=self.get_agents, methods=["GET"], ) router.add_api_route( path="/agents/{agent_id}", - endpoint=AgentServer.get_agent, + endpoint=self.get_agent, methods=["GET"], ) router.add_api_route( path="/agents", - endpoint=AgentServer.create_agent, + endpoint=self.create_agent, methods=["POST"], ) router.add_api_route( path="/agents/{agent_id}/execute", - endpoint=AgentServer.execute_agent, + endpoint=self.execute_agent, methods=["POST"], ) router.add_api_route( path="/agents/{agent_id}/executions/{run_id}", - endpoint=AgentServer.get_executions, + endpoint=self.get_executions, methods=["GET"], ) router.add_api_route( path="/agents/{agent_id}/schedules", - endpoint=AgentServer.schedule_agent, + endpoint=self.schedule_agent, methods=["POST"], ) router.add_api_route( path="/agents/{agent_id}/schedules", - endpoint=AgentServer.get_execution_schedules, + endpoint=self.get_execution_schedules, methods=["GET"], ) + router.add_api_route( + 
path="/agents/schedules/{schedule_id}", + endpoint=self.update_schedule, + methods=["PUT"], + ) app.include_router(router) uvicorn.run(app, host="0.0.0.0", port=8000) - @staticmethod - async def get_agent_blocks() -> list[dict]: + @property + def execution_manager_client(self) -> ExecutionManager: + return get_service_client(ExecutionManager) + + @property + def execution_scheduler_client(self) -> ExecutionScheduler: + return get_service_client(ExecutionScheduler) + + async def get_agent_blocks(self) -> list[dict]: return [v.to_dict() for v in await block.get_blocks()] - @staticmethod - async def get_agents() -> list[str]: + async def get_agents(self) -> list[str]: return await graph.get_graph_ids() - @staticmethod - async def get_agent(agent_id: str) -> graph.Graph: + async def get_agent(self, agent_id: str) -> graph.Graph: agent = await graph.get_graph(agent_id) if not agent: raise HTTPException(status_code=404, detail=f"Agent #{agent_id} not found.") return agent - @staticmethod - async def create_agent(agent: graph.Graph) -> graph.Graph: + async def create_agent(self, agent: graph.Graph) -> graph.Graph: agent.id = str(uuid.uuid4()) id_map = {node.id: str(uuid.uuid4()) for node in agent.nodes} @@ -104,62 +113,36 @@ async def create_agent(agent: graph.Graph) -> graph.Graph: return await graph.create_graph(agent) - @staticmethod - async def execute_agent(agent_id: str, node_input: dict) -> dict: - agent = await graph.get_graph(agent_id) - if not agent: - raise HTTPException(status_code=404, detail=f"Agent #{agent_id} not found.") - - run_id = str(uuid.uuid4()) - executions = [] - execution_manager = get_service_client(ExecutionManager) - - # Currently, there is no constraint on the number of root nodes in the graph. - for node in agent.starting_nodes: - node_block = await block.get_block(node.block_id) - if not node_block: - raise HTTPException( - status_code=404, - detail=f"Block #{node.block_id} not found.", - ) - if error := node_block.input_schema.validate_data(node_input): - raise HTTPException( - status_code=400, - detail=f"Input data doesn't match {node_block.name} input: {error}", - ) - - exec_id = execution_manager.add_execution( - run_id=run_id, node_id=node.id, data=node_input - ) - executions.append({ - "exec_id": exec_id, - "node_id": node.id, - }) + async def execute_agent(self, agent_id: str, node_input: dict) -> dict: + try: + return self.execution_manager_client.add_execution(agent_id, node_input) + except Exception as e: + msg = e.__str__().encode().decode('unicode_escape') + raise HTTPException(status_code=400, detail=msg) - return { - "run_id": run_id, - "executions": executions, - } - - @staticmethod async def get_executions( - agent_id: str, - run_id: str - ) -> list[execution.ExecutionResult]: + self, agent_id: str, run_id: str) -> list[execution.ExecutionResult]: agent = await graph.get_graph(agent_id) if not agent: raise HTTPException(status_code=404, detail=f"Agent #{agent_id} not found.") return await execution.get_executions(run_id) - @staticmethod - def schedule_agent(agent_id: str, cron: str, input_data: dict) -> dict: - execution_scheduler = get_service_client(ExecutionScheduler) + async def schedule_agent(self, agent_id: str, cron: str, input_data: dict) -> dict: + agent = await graph.get_graph(agent_id) + if not agent: + raise HTTPException(status_code=404, detail=f"Agent #{agent_id} not found.") + execution_scheduler = self.execution_scheduler_client return { "id": execution_scheduler.add_execution_schedule(agent_id, cron, input_data) } - @staticmethod - 
def get_execution_schedules(agent_id: str) -> list[dict]: - execution_scheduler = get_service_client(ExecutionScheduler) + def update_schedule(self, schedule_id: str, input_data: dict) -> dict: + execution_scheduler = self.execution_scheduler_client + is_enabled = input_data.get("is_enabled", False) + execution_scheduler.update_schedule(schedule_id, is_enabled) + return {"id": schedule_id} + + def get_execution_schedules(self, agent_id: str) -> dict[str, str]: + execution_scheduler = self.execution_scheduler_client return execution_scheduler.get_execution_schedules(agent_id) diff --git a/rnd/autogpt_server/autogpt_server/util/service.py b/rnd/autogpt_server/autogpt_server/util/service.py index 29bd19d8e290..5cfc3d220ff2 100644 --- a/rnd/autogpt_server/autogpt_server/util/service.py +++ b/rnd/autogpt_server/autogpt_server/util/service.py @@ -15,7 +15,18 @@ logger = logging.getLogger(__name__) conn_retry = retry(stop=stop_after_delay(5), wait=wait_exponential(multiplier=0.1)) -expose = pyro.expose + + +def expose(func: Callable) -> Callable: + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except Exception as e: + msg = f"Error in {func.__name__}: {e.__str__()}" + logger.error(msg) + raise Exception(msg, e) + + return pyro.expose(wrapper) class PyroNameServer(AppProcess): @@ -28,7 +39,6 @@ def run(self): class AppService(AppProcess): - shared_event_loop: asyncio.AbstractEventLoop @classmethod diff --git a/rnd/autogpt_server/poetry.lock b/rnd/autogpt_server/poetry.lock index 0b2987b86128..50cf787bcef2 100644 --- a/rnd/autogpt_server/poetry.lock +++ b/rnd/autogpt_server/poetry.lock @@ -33,6 +33,34 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphin test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (>=0.23)"] +[[package]] +name = "apscheduler" +version = "3.10.4" +description = "In-process task scheduler with Cron-like capabilities" +optional = false +python-versions = ">=3.6" +files = [ + {file = "APScheduler-3.10.4-py3-none-any.whl", hash = "sha256:fb91e8a768632a4756a585f79ec834e0e27aad5860bac7eaa523d9ccefd87661"}, + {file = "APScheduler-3.10.4.tar.gz", hash = "sha256:e6df071b27d9be898e486bc7940a7be50b4af2e9da7c08f0744a96d4bd4cef4a"}, +] + +[package.dependencies] +pytz = "*" +six = ">=1.4.0" +tzlocal = ">=2.0,<3.dev0 || >=4.dev0" + +[package.extras] +doc = ["sphinx", "sphinx-rtd-theme"] +gevent = ["gevent"] +mongodb = ["pymongo (>=3.0)"] +redis = ["redis (>=3.0)"] +rethinkdb = ["rethinkdb (>=2.4.0)"] +sqlalchemy = ["sqlalchemy (>=1.4)"] +testing = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-tornado5"] +tornado = ["tornado (>=4.3)"] +twisted = ["twisted"] +zookeeper = ["kazoo"] + [[package]] name = "attrs" version = "23.2.0" @@ -88,6 +116,21 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "croniter" +version = "2.0.5" +description = "croniter provides iteration for datetime object with cron like format" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.6" +files = [ + {file = "croniter-2.0.5-py2.py3-none-any.whl", hash = "sha256:fdbb44920944045cc323db54599b321325141d82d14fa7453bc0699826bbe9ed"}, + {file = "croniter-2.0.5.tar.gz", hash = "sha256:f1f8ca0af64212fbe99b1bee125ee5a1b53a9c1b433968d8bca8817b79d237f3"}, +] + +[package.dependencies] +python-dateutil = 
"*" +pytz = ">2021.1" + [[package]] name = "cx-freeze" version = "7.0.0" @@ -850,6 +893,24 @@ tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +[[package]] +name = "pytest-asyncio" +version = "0.23.7" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_asyncio-0.23.7-py3-none-any.whl", hash = "sha256:009b48127fbe44518a547bddd25611551b0e43ccdbf1e67d12479f569832c20b"}, + {file = "pytest_asyncio-0.23.7.tar.gz", hash = "sha256:5f5c72948f4c49e7db4f29f2521d4031f1c27f86e57b046126654083d4770268"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + [[package]] name = "pytest-watcher" version = "0.4.2" @@ -865,6 +926,20 @@ files = [ tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version < \"3.11\""} watchdog = ">=2.0.0" +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + [[package]] name = "python-dotenv" version = "1.0.1" @@ -879,6 +954,17 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + [[package]] name = "pyyaml" version = "6.0.1" @@ -1115,6 +1201,17 @@ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments testing = ["build[virtualenv]", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.2)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + [[package]] name = "sniffio" version = "1.3.1" @@ -1207,6 +1304,34 
@@ files = [ {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "tzlocal" +version = "5.2" +description = "tzinfo object for the local timezone" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, + {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, +] + +[package.dependencies] +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] + [[package]] name = "uvicorn" version = "0.30.1" @@ -1506,4 +1631,4 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "de508427e9804ded3b3139e13f209baa6cc97bc138d83952ad2b129d3aedc4e2" +content-hash = "17f25b61da5f54bb4bb13cecfedda56d23c097aacb95bb213f13ce63ee08c761" diff --git a/rnd/autogpt_server/pyproject.toml b/rnd/autogpt_server/pyproject.toml index a773707b389e..2de400fa7ad3 100644 --- a/rnd/autogpt_server/pyproject.toml +++ b/rnd/autogpt_server/pyproject.toml @@ -22,6 +22,9 @@ jsonschema = "^4.22.0" psutil = "^5.9.8" pyro5 = "^5.15" tenacity = "^8.3.0" +apscheduler = "^3.10.4" +croniter = "^2.0.5" +pytest-asyncio = "^0.23.7" [tool.poetry.group.dev.dependencies] @@ -80,3 +83,6 @@ runner = "pytest" runner_args = [] patterns = ["*.py"] ignore_patterns = [] + +[tool.pytest.ini_options] +asyncio_mode = "auto" \ No newline at end of file diff --git a/rnd/autogpt_server/schema.prisma b/rnd/autogpt_server/schema.prisma index a4b37e083956..dd049b6c76f6 100644 --- a/rnd/autogpt_server/schema.prisma +++ b/rnd/autogpt_server/schema.prisma @@ -15,7 +15,8 @@ model AgentGraph { name String? description String? - AgentNodes AgentNode[] @relation("AgentGraphNodes") + AgentNodes AgentNode[] @relation("AgentGraphNodes") + AgentExecutionSchedule AgentExecutionSchedule[] } // This model describes a single node in the Agent Graph/Flow (Multi Agent System). @@ -108,3 +109,20 @@ model FileDefinition { ReferencedByInputFiles AgentNodeExecution[] @relation("InputFiles") ReferencedByOutputFiles AgentNodeExecution[] @relation("OutputFiles") } + +// This model describes the recurring execution schedule of an Agent. +model AgentExecutionSchedule { + id String @id + + agentGraphId String + AgentGraph AgentGraph @relation(fields: [agentGraphId], references: [id]) + + schedule String // cron expression + isEnabled Boolean @default(true) + inputData String // JSON serialized object + + // default and set the value on each update, lastUpdated field has no time zone. 
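+  // The scheduler compares lastUpdated against its last polling time
+  // (see get_active_schedules) to fetch only rows changed since then.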
+ lastUpdated DateTime @updatedAt + + @@index([isEnabled]) +} diff --git a/rnd/autogpt_server/test/executor/test_manager.py b/rnd/autogpt_server/test/executor/test_manager.py index f8dc6d79a3fe..38af3f0471b0 100644 --- a/rnd/autogpt_server/test/executor/test_manager.py +++ b/rnd/autogpt_server/test/executor/test_manager.py @@ -1,6 +1,7 @@ -import asyncio import time +import pytest + from autogpt_server.data import block, db, execution, graph from autogpt_server.executor import ExecutionManager from autogpt_server.server import AgentServer @@ -44,28 +45,29 @@ async def create_test_graph() -> graph.Graph: return test_graph -def execute_agent(test_manager: ExecutionManager, test_graph: graph.Graph, wait_db): +async def execute_agent(test_manager: ExecutionManager, test_graph: graph.Graph): # --- Test adding new executions --- # text = "Hello, World!" input_data = {"input": text} - response = wait_db(AgentServer.execute_agent(test_graph.id, input_data)) + agent_server = AgentServer() + response = await agent_server.execute_agent(test_graph.id, input_data) executions = response["executions"] run_id = response["run_id"] assert len(executions) == 2 async def is_execution_completed(): - execs = await AgentServer.get_executions(test_graph.id, run_id) + execs = await agent_server.get_executions(test_graph.id, run_id) return test_manager.queue.empty() and len(execs) == 4 # Wait for the executions to complete for i in range(10): - if wait_db(is_execution_completed()): + if await is_execution_completed(): break time.sleep(1) # Execution queue should be empty - assert wait_db(is_execution_completed()) - executions = wait_db(AgentServer.get_executions(test_graph.id, run_id)) + assert await is_execution_completed() + executions = await agent_server.get_executions(test_graph.id, run_id) # Executing ParrotBlock1 exec = executions[0] @@ -108,14 +110,10 @@ async def is_execution_completed(): assert exec.node_id == test_graph.nodes[3].id -def test_agent_execution(): +@pytest.mark.asyncio(scope="session") +async def test_agent_execution(): with PyroNameServer(): - time.sleep(0.5) with ExecutionManager(1) as test_manager: - loop = asyncio.new_event_loop() - wait = loop.run_until_complete - - wait(db.connect()) - test_graph = wait(create_test_graph()) - - execute_agent(test_manager, test_graph, wait) + await db.connect() + test_graph = await create_test_graph() + await execute_agent(test_manager, test_graph) diff --git a/rnd/autogpt_server/test/executor/test_scheduler.py b/rnd/autogpt_server/test/executor/test_scheduler.py new file mode 100644 index 000000000000..4ea154d060c8 --- /dev/null +++ b/rnd/autogpt_server/test/executor/test_scheduler.py @@ -0,0 +1,33 @@ +import pytest + +import test_manager +from autogpt_server.executor.scheduler import ExecutionScheduler +from autogpt_server.util.service import PyroNameServer, get_service_client + + +@pytest.mark.asyncio(scope="session") +async def test_agent_schedule(): + await test_manager.db.connect() + test_graph = await test_manager.create_test_graph() + + with PyroNameServer(): + with ExecutionScheduler(): + scheduler = get_service_client(ExecutionScheduler) + + schedules = scheduler.get_execution_schedules(test_graph.id) + assert len(schedules) == 0 + + schedule_id = scheduler.add_execution_schedule( + test_graph.id, + "0 0 * * *", + {"input": "data"} + ) + assert schedule_id + + schedules = scheduler.get_execution_schedules(test_graph.id) + assert len(schedules) == 1 + assert schedules[schedule_id] == "0 0 * * *" + + scheduler.update_schedule(schedule_id, 
is_enabled=False) + schedules = scheduler.get_execution_schedules(test_graph.id) + assert len(schedules) == 0 diff --git a/rnd/autogpt_server/test/util/test_service.py b/rnd/autogpt_server/test/util/test_service.py index 34532b996308..bdbd5689391e 100644 --- a/rnd/autogpt_server/test/util/test_service.py +++ b/rnd/autogpt_server/test/util/test_service.py @@ -1,5 +1,3 @@ -import time - from autogpt_server.util.service import ( AppService, PyroNameServer, @@ -30,7 +28,6 @@ async def add_async(a: int, b: int) -> int: def test_service_creation(): with PyroNameServer(): - time.sleep(0.5) with TestService(): client = get_service_client(TestService) assert client.add(5, 3) == 8 From 3e01b19d6f5095ed968bfddf867faee28abc45c4 Mon Sep 17 00:00:00 2001 From: Bently Date: Tue, 25 Jun 2024 02:24:10 +0100 Subject: [PATCH 03/23] chore(forge): Update `duckduckgo-search` to v6.1.7 (#7254) This should (for now) mitigate the RateLimitErrors that people have been experiencing. --------- Co-authored-by: Reinier van der Leer --- autogpt/poetry.lock | 202 +++++++++++++++++++++++++------------------ forge/poetry.lock | 198 ++++++++++++++++++++++++------------------ forge/pyproject.toml | 2 +- 3 files changed, 233 insertions(+), 169 deletions(-) diff --git a/autogpt/poetry.lock b/autogpt/poetry.lock index 1c1d0d09afdd..141f559579f1 100644 --- a/autogpt/poetry.lock +++ b/autogpt/poetry.lock @@ -306,7 +306,7 @@ tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [[package]] name = "autogpt-forge" -version = "0.1.0" +version = "0.2.0" description = "" optional = false python-versions = "^3.10" @@ -325,7 +325,7 @@ click = "*" colorama = "^0.4.6" demjson3 = "^3.0.0" docker = "*" -duckduckgo-search = "^5.0.0" +duckduckgo-search = "^6.1.7" fastapi = "^0.109.1" gitpython = "^3.1.32" google-api-python-client = "*" @@ -359,7 +359,7 @@ watchdog = "4.0.0" webdriver-manager = "^4.0.1" [package.extras] -benchmark = ["agbenchmark"] +benchmark = ["agbenchmark @ file:///home/reinier/code/agpt/AutoGPT/benchmark"] [package.source] type = "directory" @@ -1130,34 +1130,6 @@ files = [ {file = "cssselect-1.2.0.tar.gz", hash = "sha256:666b19839cfaddb9ce9d36bfe4c969132c647b92fc9088c4e23f786b30f1b3dc"}, ] -[[package]] -name = "curl-cffi" -version = "0.7.0b4" -description = "libcurl ffi bindings for Python, with impersonation support." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "curl_cffi-0.7.0b4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:694d88f7065c59c651970f14bc415431f65ac601a9ba537463d70f432a48ccfc"}, - {file = "curl_cffi-0.7.0b4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6faf01aa8d98d322b877d3d801544692c73729ea6eb4a45af83514a4ecd1c8fe"}, - {file = "curl_cffi-0.7.0b4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d39849371bbf3eab048113693715a8da5c729c494cccfa1128d768d96fdc31e"}, - {file = "curl_cffi-0.7.0b4-cp38-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3a5099b98c4bf12cc1afecb3409a9c57e7ebce9447a03c96dfb661ad8fa5e79"}, - {file = "curl_cffi-0.7.0b4-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e3616141a2a0be7896e7dc5da1ed3965e1a78aa2e563d8aba7a641135aeaf1b"}, - {file = "curl_cffi-0.7.0b4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd16cccc0d3e93c2fbc4f4cb7cce0e10cb2ef7f8957352f3f0d770f0d6e05702"}, - {file = "curl_cffi-0.7.0b4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:d65aa649abb24020c2ad7b3ce45e2816d1ffe25df06f1a6b0f52fbf353af82e0"}, - {file = "curl_cffi-0.7.0b4-cp38-abi3-win32.whl", hash = "sha256:b55c53bb6dff713cb63f76e2f147e2d54c984b1b09df66b08f52f3acae1aeca0"}, - {file = "curl_cffi-0.7.0b4-cp38-abi3-win_amd64.whl", hash = "sha256:449ab07e07335558997cd62296b5c4f16ce27630de7830e4ad22441049a0ef1e"}, - {file = "curl_cffi-0.7.0b4.tar.gz", hash = "sha256:c09a062b8aac93d4890d2c33b7053c0e1a5cf275328b80c1fb1a950310df75f2"}, -] - -[package.dependencies] -certifi = ">=2024.2.2" -cffi = ">=1.12.0" - -[package.extras] -build = ["cibuildwheel", "wheel"] -dev = ["charset-normalizer (>=3.3.2,<4.0)", "coverage (>=6.4.1,<7.0)", "cryptography (>=42.0.5,<43.0)", "httpx (==0.23.1)", "mypy (>=1.9.0,<2.0)", "pytest (>=8.1.1,<9.0)", "pytest-asyncio (>=0.23.6,<1.0)", "pytest-trio (>=0.8.0,<1.0)", "ruff (>=0.3.5,<1.0)", "trio (>=0.25.0,<1.0)", "trustme (>=1.1.0,<2.0)", "uvicorn (>=0.29.0,<1.0)", "websockets (>=12.0,<13.0)"] -test = ["charset-normalizer (>=3.3.2,<4.0)", "cryptography (>=42.0.5,<43.0)", "fastapi (==0.110.0)", "httpx (==0.23.1)", "proxy.py (>=2.4.3,<3.0)", "pytest (>=8.1.1,<9.0)", "pytest-asyncio (>=0.23.6,<1.0)", "pytest-trio (>=0.8.0,<1.0)", "python-multipart (>=0.0.9,<1.0)", "trio (>=0.25.0,<1.0)", "trustme (>=1.1.0,<2.0)", "uvicorn (>=0.29.0,<1.0)", "websockets (>=12.0,<13.0)"] - [[package]] name = "cx-freeze" version = "7.0.0" @@ -1391,23 +1363,23 @@ websockets = ["websocket-client (>=1.3.0)"] [[package]] name = "duckduckgo-search" -version = "5.3.1" +version = "6.1.7" description = "Search for words, documents, images, news, maps and text translation using the DuckDuckGo.com search engine." 
optional = false python-versions = ">=3.8" files = [ - {file = "duckduckgo_search-5.3.1-py3-none-any.whl", hash = "sha256:a07d94babe45c9a9bd0ce2dc185346b47fe95dab516d414f21f06a0a1200aca9"}, - {file = "duckduckgo_search-5.3.1.tar.gz", hash = "sha256:2ee309e76b7e34ee84bddd5e046df723faecf7f999acdb499f3dad7e8a614c21"}, + {file = "duckduckgo_search-6.1.7-py3-none-any.whl", hash = "sha256:ec7d5becb8c392c0293ff9464938c1014896e1e14725c05adc306290a636fab2"}, + {file = "duckduckgo_search-6.1.7.tar.gz", hash = "sha256:c6fd8ba17fe9cd0a4f32e5b96984e959c3da865f9c2864bfcf82bf7ff9b7e8f0"}, ] [package.dependencies] click = ">=8.1.7" -curl-cffi = ">=0.7.0b4" -orjson = ">=3.10.3" +orjson = ">=3.10.5" +pyreqwest-impersonate = ">=0.4.8" [package.extras] -dev = ["mypy (>=1.10.0)", "pytest (>=8.2.0)", "ruff (>=0.4.3)"] -lxml = ["lxml (>=5.2.1)"] +dev = ["mypy (>=1.10.0)", "pytest (>=8.2.2)", "pytest-asyncio (>=0.23.7)", "ruff (>=0.4.8)"] +lxml = ["lxml (>=5.2.2)"] [[package]] name = "en-core-web-sm" @@ -3793,57 +3765,57 @@ files = [ [[package]] name = "orjson" -version = "3.10.3" +version = "3.10.5" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.10.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9fb6c3f9f5490a3eb4ddd46fc1b6eadb0d6fc16fb3f07320149c3286a1409dd8"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:252124b198662eee80428f1af8c63f7ff077c88723fe206a25df8dc57a57b1fa"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f3e87733823089a338ef9bbf363ef4de45e5c599a9bf50a7a9b82e86d0228da"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8334c0d87103bb9fbbe59b78129f1f40d1d1e8355bbed2ca71853af15fa4ed3"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1952c03439e4dce23482ac846e7961f9d4ec62086eb98ae76d97bd41d72644d7"}, - {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c0403ed9c706dcd2809f1600ed18f4aae50be263bd7112e54b50e2c2bc3ebd6d"}, - {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:382e52aa4270a037d41f325e7d1dfa395b7de0c367800b6f337d8157367bf3a7"}, - {file = "orjson-3.10.3-cp310-none-win32.whl", hash = "sha256:be2aab54313752c04f2cbaab4515291ef5af8c2256ce22abc007f89f42f49109"}, - {file = "orjson-3.10.3-cp310-none-win_amd64.whl", hash = "sha256:416b195f78ae461601893f482287cee1e3059ec49b4f99479aedf22a20b1098b"}, - {file = "orjson-3.10.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:73100d9abbbe730331f2242c1fc0bcb46a3ea3b4ae3348847e5a141265479700"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a12eee96e3ab828dbfcb4d5a0023aa971b27143a1d35dc214c176fdfb29b3"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520de5e2ef0b4ae546bea25129d6c7c74edb43fc6cf5213f511a927f2b28148b"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccaa0a401fc02e8828a5bedfd80f8cd389d24f65e5ca3954d72c6582495b4bcf"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7bc9e8bc11bac40f905640acd41cbeaa87209e7e1f57ade386da658092dc16"}, - {file = 
"orjson-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3582b34b70543a1ed6944aca75e219e1192661a63da4d039d088a09c67543b08"}, - {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c23dfa91481de880890d17aa7b91d586a4746a4c2aa9a145bebdbaf233768d5"}, - {file = "orjson-3.10.3-cp311-none-win32.whl", hash = "sha256:1770e2a0eae728b050705206d84eda8b074b65ee835e7f85c919f5705b006c9b"}, - {file = "orjson-3.10.3-cp311-none-win_amd64.whl", hash = "sha256:93433b3c1f852660eb5abdc1f4dd0ced2be031ba30900433223b28ee0140cde5"}, - {file = "orjson-3.10.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a39aa73e53bec8d410875683bfa3a8edf61e5a1c7bb4014f65f81d36467ea098"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0943a96b3fa09bee1afdfccc2cb236c9c64715afa375b2af296c73d91c23eab2"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e852baafceff8da3c9defae29414cc8513a1586ad93e45f27b89a639c68e8176"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18566beb5acd76f3769c1d1a7ec06cdb81edc4d55d2765fb677e3eaa10fa99e0"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd2218d5a3aa43060efe649ec564ebedec8ce6ae0a43654b81376216d5ebd42"}, - {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cf20465e74c6e17a104ecf01bf8cd3b7b252565b4ccee4548f18b012ff2f8069"}, - {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ba7f67aa7f983c4345eeda16054a4677289011a478ca947cd69c0a86ea45e534"}, - {file = "orjson-3.10.3-cp312-none-win32.whl", hash = "sha256:17e0713fc159abc261eea0f4feda611d32eabc35708b74bef6ad44f6c78d5ea0"}, - {file = "orjson-3.10.3-cp312-none-win_amd64.whl", hash = "sha256:4c895383b1ec42b017dd2c75ae8a5b862fc489006afde06f14afbdd0309b2af0"}, - {file = "orjson-3.10.3-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:be2719e5041e9fb76c8c2c06b9600fe8e8584e6980061ff88dcbc2691a16d20d"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0175a5798bdc878956099f5c54b9837cb62cfbf5d0b86ba6d77e43861bcec2"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:978be58a68ade24f1af7758626806e13cff7748a677faf95fbb298359aa1e20d"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16bda83b5c61586f6f788333d3cf3ed19015e3b9019188c56983b5a299210eb5"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ad1f26bea425041e0a1adad34630c4825a9e3adec49079b1fb6ac8d36f8b754"}, - {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9e253498bee561fe85d6325ba55ff2ff08fb5e7184cd6a4d7754133bd19c9195"}, - {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a62f9968bab8a676a164263e485f30a0b748255ee2f4ae49a0224be95f4532b"}, - {file = "orjson-3.10.3-cp38-none-win32.whl", hash = "sha256:8d0b84403d287d4bfa9bf7d1dc298d5c1c5d9f444f3737929a66f2fe4fb8f134"}, - {file = "orjson-3.10.3-cp38-none-win_amd64.whl", hash = "sha256:8bc7a4df90da5d535e18157220d7915780d07198b54f4de0110eca6b6c11e290"}, - {file = "orjson-3.10.3-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:9059d15c30e675a58fdcd6f95465c1522b8426e092de9fff20edebfdc15e1cb0"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d40c7f7938c9c2b934b297412c067936d0b54e4b8ab916fd1a9eb8f54c02294"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a654ec1de8fdaae1d80d55cee65893cb06494e124681ab335218be6a0691e7"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:831c6ef73f9aa53c5f40ae8f949ff7681b38eaddb6904aab89dca4d85099cb78"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99b880d7e34542db89f48d14ddecbd26f06838b12427d5a25d71baceb5ba119d"}, - {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e5e176c994ce4bd434d7aafb9ecc893c15f347d3d2bbd8e7ce0b63071c52e25"}, - {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b69a58a37dab856491bf2d3bbf259775fdce262b727f96aafbda359cb1d114d8"}, - {file = "orjson-3.10.3-cp39-none-win32.whl", hash = "sha256:b8d4d1a6868cde356f1402c8faeb50d62cee765a1f7ffcfd6de732ab0581e063"}, - {file = "orjson-3.10.3-cp39-none-win_amd64.whl", hash = "sha256:5102f50c5fc46d94f2033fe00d392588564378260d64377aec702f21a7a22912"}, - {file = "orjson-3.10.3.tar.gz", hash = "sha256:2b166507acae7ba2f7c315dcf185a9111ad5e992ac81f2d507aac39193c2c818"}, + {file = "orjson-3.10.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:545d493c1f560d5ccfc134803ceb8955a14c3fcb47bbb4b2fee0232646d0b932"}, + {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4324929c2dd917598212bfd554757feca3e5e0fa60da08be11b4aa8b90013c1"}, + {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c13ca5e2ddded0ce6a927ea5a9f27cae77eee4c75547b4297252cb20c4d30e6"}, + {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6c8e30adfa52c025f042a87f450a6b9ea29649d828e0fec4858ed5e6caecf63"}, + {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338fd4f071b242f26e9ca802f443edc588fa4ab60bfa81f38beaedf42eda226c"}, + {file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6970ed7a3126cfed873c5d21ece1cd5d6f83ca6c9afb71bbae21a0b034588d96"}, + {file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:235dadefb793ad12f7fa11e98a480db1f7c6469ff9e3da5e73c7809c700d746b"}, + {file = "orjson-3.10.5-cp310-none-win32.whl", hash = "sha256:be79e2393679eda6a590638abda16d167754393f5d0850dcbca2d0c3735cebe2"}, + {file = "orjson-3.10.5-cp310-none-win_amd64.whl", hash = "sha256:c4a65310ccb5c9910c47b078ba78e2787cb3878cdded1702ac3d0da71ddc5228"}, + {file = "orjson-3.10.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:cdf7365063e80899ae3a697def1277c17a7df7ccfc979990a403dfe77bb54d40"}, + {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b68742c469745d0e6ca5724506858f75e2f1e5b59a4315861f9e2b1df77775a"}, + {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d10cc1b594951522e35a3463da19e899abe6ca95f3c84c69e9e901e0bd93d38"}, + {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcbe82b35d1ac43b0d84072408330fd3295c2896973112d495e7234f7e3da2e1"}, + {file = 
"orjson-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c0eb7e0c75e1e486c7563fe231b40fdd658a035ae125c6ba651ca3b07936f5"}, + {file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:53ed1c879b10de56f35daf06dbc4a0d9a5db98f6ee853c2dbd3ee9d13e6f302f"}, + {file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:099e81a5975237fda3100f918839af95f42f981447ba8f47adb7b6a3cdb078fa"}, + {file = "orjson-3.10.5-cp311-none-win32.whl", hash = "sha256:1146bf85ea37ac421594107195db8bc77104f74bc83e8ee21a2e58596bfb2f04"}, + {file = "orjson-3.10.5-cp311-none-win_amd64.whl", hash = "sha256:36a10f43c5f3a55c2f680efe07aa93ef4a342d2960dd2b1b7ea2dd764fe4a37c"}, + {file = "orjson-3.10.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:68f85ecae7af14a585a563ac741b0547a3f291de81cd1e20903e79f25170458f"}, + {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28afa96f496474ce60d3340fe8d9a263aa93ea01201cd2bad844c45cd21f5268"}, + {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cd684927af3e11b6e754df80b9ffafd9fb6adcaa9d3e8fdd5891be5a5cad51e"}, + {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d21b9983da032505f7050795e98b5d9eee0df903258951566ecc358f6696969"}, + {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ad1de7fef79736dde8c3554e75361ec351158a906d747bd901a52a5c9c8d24b"}, + {file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d97531cdfe9bdd76d492e69800afd97e5930cb0da6a825646667b2c6c6c0211"}, + {file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d69858c32f09c3e1ce44b617b3ebba1aba030e777000ebdf72b0d8e365d0b2b3"}, + {file = "orjson-3.10.5-cp312-none-win32.whl", hash = "sha256:64c9cc089f127e5875901ac05e5c25aa13cfa5dbbbd9602bda51e5c611d6e3e2"}, + {file = "orjson-3.10.5-cp312-none-win_amd64.whl", hash = "sha256:b2efbd67feff8c1f7728937c0d7f6ca8c25ec81373dc8db4ef394c1d93d13dc5"}, + {file = "orjson-3.10.5-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:03b565c3b93f5d6e001db48b747d31ea3819b89abf041ee10ac6988886d18e01"}, + {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:584c902ec19ab7928fd5add1783c909094cc53f31ac7acfada817b0847975f26"}, + {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a35455cc0b0b3a1eaf67224035f5388591ec72b9b6136d66b49a553ce9eb1e6"}, + {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1670fe88b116c2745a3a30b0f099b699a02bb3482c2591514baf5433819e4f4d"}, + {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185c394ef45b18b9a7d8e8f333606e2e8194a50c6e3c664215aae8cf42c5385e"}, + {file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ca0b3a94ac8d3886c9581b9f9de3ce858263865fdaa383fbc31c310b9eac07c9"}, + {file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dfc91d4720d48e2a709e9c368d5125b4b5899dced34b5400c3837dadc7d6271b"}, + {file = "orjson-3.10.5-cp38-none-win32.whl", hash = "sha256:c05f16701ab2a4ca146d0bca950af254cb7c02f3c01fca8efbbad82d23b3d9d4"}, + {file = "orjson-3.10.5-cp38-none-win_amd64.whl", hash = 
"sha256:8a11d459338f96a9aa7f232ba95679fc0c7cedbd1b990d736467894210205c09"}, + {file = "orjson-3.10.5-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:85c89131d7b3218db1b24c4abecea92fd6c7f9fab87441cfc342d3acc725d807"}, + {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66215277a230c456f9038d5e2d84778141643207f85336ef8d2a9da26bd7ca"}, + {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51bbcdea96cdefa4a9b4461e690c75ad4e33796530d182bdd5c38980202c134a"}, + {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbead71dbe65f959b7bd8cf91e0e11d5338033eba34c114f69078d59827ee139"}, + {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df58d206e78c40da118a8c14fc189207fffdcb1f21b3b4c9c0c18e839b5a214"}, + {file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c4057c3b511bb8aef605616bd3f1f002a697c7e4da6adf095ca5b84c0fd43595"}, + {file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b39e006b00c57125ab974362e740c14a0c6a66ff695bff44615dcf4a70ce2b86"}, + {file = "orjson-3.10.5-cp39-none-win32.whl", hash = "sha256:eded5138cc565a9d618e111c6d5c2547bbdd951114eb822f7f6309e04db0fb47"}, + {file = "orjson-3.10.5-cp39-none-win_amd64.whl", hash = "sha256:cc28e90a7cae7fcba2493953cff61da5a52950e78dc2dacfe931a317ee3d8de7"}, + {file = "orjson-3.10.5.tar.gz", hash = "sha256:7a5baef8a4284405d96c90c7c62b755e9ef1ada84c2406c24a9ebec86b89f46d"}, ] [[package]] @@ -4677,6 +4649,66 @@ files = [ {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, ] +[[package]] +name = "pyreqwest-impersonate" +version = "0.4.8" +description = "HTTP client that can impersonate web browsers, mimicking their headers and `TLS/JA3/JA4/HTTP2` fingerprints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:45cad57afe4e6f56078ed9a7a90d0dc839d19d3e7a70175c80af21017f383bfb"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1986600253baf38f25fd07b8bdc1903359c26e5d34beb7d7d084845554b5664d"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cca4e6e59b9ad0cd20bad6caed3ac96992cd9c1d3126ecdfcab2c0ac2b75376"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab6b32544491ee655264dab86fc8a58e47c4f87d196b28022d4007faf971a50"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:64bd6299e7fc888bb7f7292cf3e29504c406e5d5d04afd37ca994ab8142d8ee4"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e914b650dd953b8d9b24ef56aa4ecbfc16e399227b68accd818f8bf159e0c558"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-none-win_amd64.whl", hash = "sha256:cb56a2149b0c4548a8e0158b071a943f33dae9b717f92b5c9ac34ccd1f5a958c"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f62620e023490902feca0109f306e122e427feff7d59e03ecd22c69a89452367"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08d4c01d76da88cfe3d7d03b311b375ce3fb5a59130f93f0637bb755d6e56ff1"}, + {file = 
"pyreqwest_impersonate-0.4.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6524e276bc460176c79d7ba4b9131d9db73c534586660371ebdf067749252a33"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22863bc0aaf02ca2f5d76c8130929ae680b7d82dfc1c28c1ed5f306ff626928"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8cc82d57f6a91037e64a7aa9122f909576ef2a141a42ce599958ef9f8c4bc033"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:da8a053308210e44fd8349f07f45442a0691ac932f2881e98b05cf9ac404b091"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-none-win_amd64.whl", hash = "sha256:4baf3916c14364a815a64ead7f728afb61b37541933b2771f18dbb245029bb55"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:78db05deed0b32c9c75f2b3168a3a9b7d5e36487b218cb839bfe7e2a143450cb"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9af9446d605903c2b4e94621a9093f8d8a403729bc9cbfbcb62929f8238c838f"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c55890181d8d81e66cac25a95e215dc9680645d01e9091b64449d5407ad9bc6"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e69344e7ae9964502a8693da7ad77ebc3e1418ee197e2e394bc23c5d4970772a"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b5db5c957a10d8cc2815085ba0b8fe09245b2f94c2225d9653a854a03b4217e1"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:03c19c21f63f9c91c590c4bbcc32cc2d8066b508c683a1d163b8c7d9816a01d5"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-none-win_amd64.whl", hash = "sha256:0230610779129f74ff802c744643ce7589b1d07cba21d046fe3b574281c29581"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b8cb9471ab4b2fa7e80d3ac4e580249ff988d782f2938ad1f0428433652b170d"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8081a5ace2658be91519902bde9ddc5f94e1f850a39be196007a25e3da5bbfdc"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69eababfa3200459276acd780a0f3eaf41d1fe7c02bd169e714cba422055b5b9"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:632957fa671ebb841166e40913015de457225cb73600ef250c436c280e68bf45"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2ce7ddef334b4e5c68f5ea1da1d65f686b8d84f4443059d128e0f069d3fa499a"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6ce333d450b158d582e36317089a006440b4e66739a8e8849d170e4cb15e8c8d"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-none-win_amd64.whl", hash = "sha256:9d9c85ce19db92362854f534807e470f03e905f283a7de6826dc79b790a8788e"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2503277f2a95a30e28e498570e2ed03ef4302f873054e8e21d6c0e607cbbc1d1"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8260395ef4ddae325e8b30cef0391adde7bd35e1a1decf8c729e26391f09b52d"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6d8066b46d82bbaff5402d767e2f13d3449b8191c37bf8283e91d301a7159869"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9c42f6343cfbd6663fb53edc9eb9feb4ebf6186b284e22368adc1eeb6a33854"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ff534f491a059e74fb7f994876df86078b4b125dbecc53c098a298ecd55fa9c6"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5b8fbf73b3ac513ddadafd338d61f79cd2370f0691d9175b2b92a45920920d6b"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-none-win_amd64.whl", hash = "sha256:a26447c82665d0e361207c1a15e56b0ca54974aa6c1fdfa18c68f908dec78cbe"}, + {file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24a16b8d55309f0af0db9d04ff442b0c91afccf078a94809e7c3a71747a5c214"}, + {file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c8fada56465fc19179404cc9d5d5e1064f5dfe27405cb052f57a5b4fe06aed1"}, + {file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a3d48d5abc146fd804395713427d944757a99254350e6a651e7d776818074aee"}, + {file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:475829fe9994c66258157a8d4adb1c038f44f79f901208ba656d547842337227"}, + {file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef1ec0e97623bc0e18469418cc4dd2c59a2d5fddcae944de61e13c0b46f910e"}, + {file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91857b196de89e9b36d3f8629aa8772c0bbe7efef8334fe266956b1c192ec31c"}, + {file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:63831e407487b8a21bb51f97cd86a616c291d5138f8caec16ab6019cf6423935"}, + {file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c30e61de93bcd0a9d3ca226b1ae5475002afde61e9d85018a6a4a040eeb86567"}, + {file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6c72c37b03bce9900f5dbb4f476af17253ec60c13bf7a7259f71a8dc1b036cb"}, + {file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f1096165741b5c2178ab15b0eb09b5de16dd39b1cc135767d72471f0a69ce"}, + {file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:70c940c0e4ef335e22a6c705b01f286ee44780b5909065d212d94d82ea2580cb"}, + {file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:81c06f21757602d85f16dbc1cbaee1121cd65455f65aed4c048b7dcda7be85c4"}, + {file = "pyreqwest_impersonate-0.4.8.tar.gz", hash = "sha256:1eba11d47bd17244c64fec1502cc26ee66cc5c8a3be131e408101ae2b455e5bc"}, +] + +[package.extras] +dev = ["pytest (>=8.1.1)"] + [[package]] name = "pyright" version = "1.1.366" diff --git a/forge/poetry.lock b/forge/poetry.lock index d713a148cf5f..45b04dafb29a 100644 --- a/forge/poetry.lock +++ b/forge/poetry.lock @@ -1444,34 +1444,6 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] -[[package]] -name = "curl-cffi" -version = "0.7.0b4" -description = "libcurl ffi bindings for Python, with impersonation support." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "curl_cffi-0.7.0b4-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:694d88f7065c59c651970f14bc415431f65ac601a9ba537463d70f432a48ccfc"}, - {file = "curl_cffi-0.7.0b4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6faf01aa8d98d322b877d3d801544692c73729ea6eb4a45af83514a4ecd1c8fe"}, - {file = "curl_cffi-0.7.0b4-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d39849371bbf3eab048113693715a8da5c729c494cccfa1128d768d96fdc31e"}, - {file = "curl_cffi-0.7.0b4-cp38-abi3-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3a5099b98c4bf12cc1afecb3409a9c57e7ebce9447a03c96dfb661ad8fa5e79"}, - {file = "curl_cffi-0.7.0b4-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e3616141a2a0be7896e7dc5da1ed3965e1a78aa2e563d8aba7a641135aeaf1b"}, - {file = "curl_cffi-0.7.0b4-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bd16cccc0d3e93c2fbc4f4cb7cce0e10cb2ef7f8957352f3f0d770f0d6e05702"}, - {file = "curl_cffi-0.7.0b4-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:d65aa649abb24020c2ad7b3ce45e2816d1ffe25df06f1a6b0f52fbf353af82e0"}, - {file = "curl_cffi-0.7.0b4-cp38-abi3-win32.whl", hash = "sha256:b55c53bb6dff713cb63f76e2f147e2d54c984b1b09df66b08f52f3acae1aeca0"}, - {file = "curl_cffi-0.7.0b4-cp38-abi3-win_amd64.whl", hash = "sha256:449ab07e07335558997cd62296b5c4f16ce27630de7830e4ad22441049a0ef1e"}, - {file = "curl_cffi-0.7.0b4.tar.gz", hash = "sha256:c09a062b8aac93d4890d2c33b7053c0e1a5cf275328b80c1fb1a950310df75f2"}, -] - -[package.dependencies] -certifi = ">=2024.2.2" -cffi = ">=1.12.0" - -[package.extras] -build = ["cibuildwheel", "wheel"] -dev = ["charset-normalizer (>=3.3.2,<4.0)", "coverage (>=6.4.1,<7.0)", "cryptography (>=42.0.5,<43.0)", "httpx (==0.23.1)", "mypy (>=1.9.0,<2.0)", "pytest (>=8.1.1,<9.0)", "pytest-asyncio (>=0.23.6,<1.0)", "pytest-trio (>=0.8.0,<1.0)", "ruff (>=0.3.5,<1.0)", "trio (>=0.25.0,<1.0)", "trustme (>=1.1.0,<2.0)", "uvicorn (>=0.29.0,<1.0)", "websockets (>=12.0,<13.0)"] -test = ["charset-normalizer (>=3.3.2,<4.0)", "cryptography (>=42.0.5,<43.0)", "fastapi (==0.110.0)", "httpx (==0.23.1)", "proxy.py (>=2.4.3,<3.0)", "pytest (>=8.1.1,<9.0)", "pytest-asyncio (>=0.23.6,<1.0)", "pytest-trio (>=0.8.0,<1.0)", "python-multipart (>=0.0.9,<1.0)", "trio (>=0.25.0,<1.0)", "trustme (>=1.1.0,<2.0)", "uvicorn (>=0.29.0,<1.0)", "websockets (>=12.0,<13.0)"] - [[package]] name = "cycler" version = "0.12.1" @@ -1612,23 +1584,23 @@ websockets = ["websocket-client (>=1.3.0)"] [[package]] name = "duckduckgo-search" -version = "5.3.1" +version = "6.1.7" description = "Search for words, documents, images, news, maps and text translation using the DuckDuckGo.com search engine." 
optional = false python-versions = ">=3.8" files = [ - {file = "duckduckgo_search-5.3.1-py3-none-any.whl", hash = "sha256:a07d94babe45c9a9bd0ce2dc185346b47fe95dab516d414f21f06a0a1200aca9"}, - {file = "duckduckgo_search-5.3.1.tar.gz", hash = "sha256:2ee309e76b7e34ee84bddd5e046df723faecf7f999acdb499f3dad7e8a614c21"}, + {file = "duckduckgo_search-6.1.7-py3-none-any.whl", hash = "sha256:ec7d5becb8c392c0293ff9464938c1014896e1e14725c05adc306290a636fab2"}, + {file = "duckduckgo_search-6.1.7.tar.gz", hash = "sha256:c6fd8ba17fe9cd0a4f32e5b96984e959c3da865f9c2864bfcf82bf7ff9b7e8f0"}, ] [package.dependencies] click = ">=8.1.7" -curl-cffi = ">=0.7.0b4" -orjson = ">=3.10.3" +orjson = ">=3.10.5" +pyreqwest-impersonate = ">=0.4.8" [package.extras] -dev = ["mypy (>=1.10.0)", "pytest (>=8.2.0)", "ruff (>=0.4.3)"] -lxml = ["lxml (>=5.2.1)"] +dev = ["mypy (>=1.10.0)", "pytest (>=8.2.2)", "pytest-asyncio (>=0.23.7)", "ruff (>=0.4.8)"] +lxml = ["lxml (>=5.2.2)"] [[package]] name = "exceptiongroup" @@ -3974,57 +3946,57 @@ files = [ [[package]] name = "orjson" -version = "3.10.3" +version = "3.10.5" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" files = [ - {file = "orjson-3.10.3-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9fb6c3f9f5490a3eb4ddd46fc1b6eadb0d6fc16fb3f07320149c3286a1409dd8"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:252124b198662eee80428f1af8c63f7ff077c88723fe206a25df8dc57a57b1fa"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9f3e87733823089a338ef9bbf363ef4de45e5c599a9bf50a7a9b82e86d0228da"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8334c0d87103bb9fbbe59b78129f1f40d1d1e8355bbed2ca71853af15fa4ed3"}, - {file = "orjson-3.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1952c03439e4dce23482ac846e7961f9d4ec62086eb98ae76d97bd41d72644d7"}, - {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c0403ed9c706dcd2809f1600ed18f4aae50be263bd7112e54b50e2c2bc3ebd6d"}, - {file = "orjson-3.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:382e52aa4270a037d41f325e7d1dfa395b7de0c367800b6f337d8157367bf3a7"}, - {file = "orjson-3.10.3-cp310-none-win32.whl", hash = "sha256:be2aab54313752c04f2cbaab4515291ef5af8c2256ce22abc007f89f42f49109"}, - {file = "orjson-3.10.3-cp310-none-win_amd64.whl", hash = "sha256:416b195f78ae461601893f482287cee1e3059ec49b4f99479aedf22a20b1098b"}, - {file = "orjson-3.10.3-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:73100d9abbbe730331f2242c1fc0bcb46a3ea3b4ae3348847e5a141265479700"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:544a12eee96e3ab828dbfcb4d5a0023aa971b27143a1d35dc214c176fdfb29b3"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520de5e2ef0b4ae546bea25129d6c7c74edb43fc6cf5213f511a927f2b28148b"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ccaa0a401fc02e8828a5bedfd80f8cd389d24f65e5ca3954d72c6582495b4bcf"}, - {file = "orjson-3.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a7bc9e8bc11bac40f905640acd41cbeaa87209e7e1f57ade386da658092dc16"}, - {file = 
"orjson-3.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3582b34b70543a1ed6944aca75e219e1192661a63da4d039d088a09c67543b08"}, - {file = "orjson-3.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1c23dfa91481de880890d17aa7b91d586a4746a4c2aa9a145bebdbaf233768d5"}, - {file = "orjson-3.10.3-cp311-none-win32.whl", hash = "sha256:1770e2a0eae728b050705206d84eda8b074b65ee835e7f85c919f5705b006c9b"}, - {file = "orjson-3.10.3-cp311-none-win_amd64.whl", hash = "sha256:93433b3c1f852660eb5abdc1f4dd0ced2be031ba30900433223b28ee0140cde5"}, - {file = "orjson-3.10.3-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:a39aa73e53bec8d410875683bfa3a8edf61e5a1c7bb4014f65f81d36467ea098"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0943a96b3fa09bee1afdfccc2cb236c9c64715afa375b2af296c73d91c23eab2"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e852baafceff8da3c9defae29414cc8513a1586ad93e45f27b89a639c68e8176"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18566beb5acd76f3769c1d1a7ec06cdb81edc4d55d2765fb677e3eaa10fa99e0"}, - {file = "orjson-3.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bd2218d5a3aa43060efe649ec564ebedec8ce6ae0a43654b81376216d5ebd42"}, - {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cf20465e74c6e17a104ecf01bf8cd3b7b252565b4ccee4548f18b012ff2f8069"}, - {file = "orjson-3.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ba7f67aa7f983c4345eeda16054a4677289011a478ca947cd69c0a86ea45e534"}, - {file = "orjson-3.10.3-cp312-none-win32.whl", hash = "sha256:17e0713fc159abc261eea0f4feda611d32eabc35708b74bef6ad44f6c78d5ea0"}, - {file = "orjson-3.10.3-cp312-none-win_amd64.whl", hash = "sha256:4c895383b1ec42b017dd2c75ae8a5b862fc489006afde06f14afbdd0309b2af0"}, - {file = "orjson-3.10.3-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:be2719e5041e9fb76c8c2c06b9600fe8e8584e6980061ff88dcbc2691a16d20d"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0175a5798bdc878956099f5c54b9837cb62cfbf5d0b86ba6d77e43861bcec2"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:978be58a68ade24f1af7758626806e13cff7748a677faf95fbb298359aa1e20d"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16bda83b5c61586f6f788333d3cf3ed19015e3b9019188c56983b5a299210eb5"}, - {file = "orjson-3.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ad1f26bea425041e0a1adad34630c4825a9e3adec49079b1fb6ac8d36f8b754"}, - {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:9e253498bee561fe85d6325ba55ff2ff08fb5e7184cd6a4d7754133bd19c9195"}, - {file = "orjson-3.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0a62f9968bab8a676a164263e485f30a0b748255ee2f4ae49a0224be95f4532b"}, - {file = "orjson-3.10.3-cp38-none-win32.whl", hash = "sha256:8d0b84403d287d4bfa9bf7d1dc298d5c1c5d9f444f3737929a66f2fe4fb8f134"}, - {file = "orjson-3.10.3-cp38-none-win_amd64.whl", hash = "sha256:8bc7a4df90da5d535e18157220d7915780d07198b54f4de0110eca6b6c11e290"}, - {file = "orjson-3.10.3-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = 
"sha256:9059d15c30e675a58fdcd6f95465c1522b8426e092de9fff20edebfdc15e1cb0"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d40c7f7938c9c2b934b297412c067936d0b54e4b8ab916fd1a9eb8f54c02294"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a654ec1de8fdaae1d80d55cee65893cb06494e124681ab335218be6a0691e7"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:831c6ef73f9aa53c5f40ae8f949ff7681b38eaddb6904aab89dca4d85099cb78"}, - {file = "orjson-3.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99b880d7e34542db89f48d14ddecbd26f06838b12427d5a25d71baceb5ba119d"}, - {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2e5e176c994ce4bd434d7aafb9ecc893c15f347d3d2bbd8e7ce0b63071c52e25"}, - {file = "orjson-3.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b69a58a37dab856491bf2d3bbf259775fdce262b727f96aafbda359cb1d114d8"}, - {file = "orjson-3.10.3-cp39-none-win32.whl", hash = "sha256:b8d4d1a6868cde356f1402c8faeb50d62cee765a1f7ffcfd6de732ab0581e063"}, - {file = "orjson-3.10.3-cp39-none-win_amd64.whl", hash = "sha256:5102f50c5fc46d94f2033fe00d392588564378260d64377aec702f21a7a22912"}, - {file = "orjson-3.10.3.tar.gz", hash = "sha256:2b166507acae7ba2f7c315dcf185a9111ad5e992ac81f2d507aac39193c2c818"}, + {file = "orjson-3.10.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:545d493c1f560d5ccfc134803ceb8955a14c3fcb47bbb4b2fee0232646d0b932"}, + {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4324929c2dd917598212bfd554757feca3e5e0fa60da08be11b4aa8b90013c1"}, + {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c13ca5e2ddded0ce6a927ea5a9f27cae77eee4c75547b4297252cb20c4d30e6"}, + {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6c8e30adfa52c025f042a87f450a6b9ea29649d828e0fec4858ed5e6caecf63"}, + {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338fd4f071b242f26e9ca802f443edc588fa4ab60bfa81f38beaedf42eda226c"}, + {file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6970ed7a3126cfed873c5d21ece1cd5d6f83ca6c9afb71bbae21a0b034588d96"}, + {file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:235dadefb793ad12f7fa11e98a480db1f7c6469ff9e3da5e73c7809c700d746b"}, + {file = "orjson-3.10.5-cp310-none-win32.whl", hash = "sha256:be79e2393679eda6a590638abda16d167754393f5d0850dcbca2d0c3735cebe2"}, + {file = "orjson-3.10.5-cp310-none-win_amd64.whl", hash = "sha256:c4a65310ccb5c9910c47b078ba78e2787cb3878cdded1702ac3d0da71ddc5228"}, + {file = "orjson-3.10.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:cdf7365063e80899ae3a697def1277c17a7df7ccfc979990a403dfe77bb54d40"}, + {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b68742c469745d0e6ca5724506858f75e2f1e5b59a4315861f9e2b1df77775a"}, + {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d10cc1b594951522e35a3463da19e899abe6ca95f3c84c69e9e901e0bd93d38"}, + {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcbe82b35d1ac43b0d84072408330fd3295c2896973112d495e7234f7e3da2e1"}, + {file = 
"orjson-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c0eb7e0c75e1e486c7563fe231b40fdd658a035ae125c6ba651ca3b07936f5"}, + {file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:53ed1c879b10de56f35daf06dbc4a0d9a5db98f6ee853c2dbd3ee9d13e6f302f"}, + {file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:099e81a5975237fda3100f918839af95f42f981447ba8f47adb7b6a3cdb078fa"}, + {file = "orjson-3.10.5-cp311-none-win32.whl", hash = "sha256:1146bf85ea37ac421594107195db8bc77104f74bc83e8ee21a2e58596bfb2f04"}, + {file = "orjson-3.10.5-cp311-none-win_amd64.whl", hash = "sha256:36a10f43c5f3a55c2f680efe07aa93ef4a342d2960dd2b1b7ea2dd764fe4a37c"}, + {file = "orjson-3.10.5-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:68f85ecae7af14a585a563ac741b0547a3f291de81cd1e20903e79f25170458f"}, + {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28afa96f496474ce60d3340fe8d9a263aa93ea01201cd2bad844c45cd21f5268"}, + {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cd684927af3e11b6e754df80b9ffafd9fb6adcaa9d3e8fdd5891be5a5cad51e"}, + {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d21b9983da032505f7050795e98b5d9eee0df903258951566ecc358f6696969"}, + {file = "orjson-3.10.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ad1de7fef79736dde8c3554e75361ec351158a906d747bd901a52a5c9c8d24b"}, + {file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d97531cdfe9bdd76d492e69800afd97e5930cb0da6a825646667b2c6c6c0211"}, + {file = "orjson-3.10.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d69858c32f09c3e1ce44b617b3ebba1aba030e777000ebdf72b0d8e365d0b2b3"}, + {file = "orjson-3.10.5-cp312-none-win32.whl", hash = "sha256:64c9cc089f127e5875901ac05e5c25aa13cfa5dbbbd9602bda51e5c611d6e3e2"}, + {file = "orjson-3.10.5-cp312-none-win_amd64.whl", hash = "sha256:b2efbd67feff8c1f7728937c0d7f6ca8c25ec81373dc8db4ef394c1d93d13dc5"}, + {file = "orjson-3.10.5-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:03b565c3b93f5d6e001db48b747d31ea3819b89abf041ee10ac6988886d18e01"}, + {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:584c902ec19ab7928fd5add1783c909094cc53f31ac7acfada817b0847975f26"}, + {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a35455cc0b0b3a1eaf67224035f5388591ec72b9b6136d66b49a553ce9eb1e6"}, + {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1670fe88b116c2745a3a30b0f099b699a02bb3482c2591514baf5433819e4f4d"}, + {file = "orjson-3.10.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:185c394ef45b18b9a7d8e8f333606e2e8194a50c6e3c664215aae8cf42c5385e"}, + {file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ca0b3a94ac8d3886c9581b9f9de3ce858263865fdaa383fbc31c310b9eac07c9"}, + {file = "orjson-3.10.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dfc91d4720d48e2a709e9c368d5125b4b5899dced34b5400c3837dadc7d6271b"}, + {file = "orjson-3.10.5-cp38-none-win32.whl", hash = "sha256:c05f16701ab2a4ca146d0bca950af254cb7c02f3c01fca8efbbad82d23b3d9d4"}, + {file = "orjson-3.10.5-cp38-none-win_amd64.whl", hash = 
"sha256:8a11d459338f96a9aa7f232ba95679fc0c7cedbd1b990d736467894210205c09"}, + {file = "orjson-3.10.5-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:85c89131d7b3218db1b24c4abecea92fd6c7f9fab87441cfc342d3acc725d807"}, + {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66215277a230c456f9038d5e2d84778141643207f85336ef8d2a9da26bd7ca"}, + {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51bbcdea96cdefa4a9b4461e690c75ad4e33796530d182bdd5c38980202c134a"}, + {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbead71dbe65f959b7bd8cf91e0e11d5338033eba34c114f69078d59827ee139"}, + {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df58d206e78c40da118a8c14fc189207fffdcb1f21b3b4c9c0c18e839b5a214"}, + {file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c4057c3b511bb8aef605616bd3f1f002a697c7e4da6adf095ca5b84c0fd43595"}, + {file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b39e006b00c57125ab974362e740c14a0c6a66ff695bff44615dcf4a70ce2b86"}, + {file = "orjson-3.10.5-cp39-none-win32.whl", hash = "sha256:eded5138cc565a9d618e111c6d5c2547bbdd951114eb822f7f6309e04db0fb47"}, + {file = "orjson-3.10.5-cp39-none-win_amd64.whl", hash = "sha256:cc28e90a7cae7fcba2493953cff61da5a52950e78dc2dacfe931a317ee3d8de7"}, + {file = "orjson-3.10.5.tar.gz", hash = "sha256:7a5baef8a4284405d96c90c7c62b755e9ef1ada84c2406c24a9ebec86b89f46d"}, ] [[package]] @@ -4746,6 +4718,66 @@ files = [ {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, ] +[[package]] +name = "pyreqwest-impersonate" +version = "0.4.8" +description = "HTTP client that can impersonate web browsers, mimicking their headers and `TLS/JA3/JA4/HTTP2` fingerprints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:45cad57afe4e6f56078ed9a7a90d0dc839d19d3e7a70175c80af21017f383bfb"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1986600253baf38f25fd07b8bdc1903359c26e5d34beb7d7d084845554b5664d"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cca4e6e59b9ad0cd20bad6caed3ac96992cd9c1d3126ecdfcab2c0ac2b75376"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ab6b32544491ee655264dab86fc8a58e47c4f87d196b28022d4007faf971a50"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:64bd6299e7fc888bb7f7292cf3e29504c406e5d5d04afd37ca994ab8142d8ee4"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e914b650dd953b8d9b24ef56aa4ecbfc16e399227b68accd818f8bf159e0c558"}, + {file = "pyreqwest_impersonate-0.4.8-cp310-none-win_amd64.whl", hash = "sha256:cb56a2149b0c4548a8e0158b071a943f33dae9b717f92b5c9ac34ccd1f5a958c"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f62620e023490902feca0109f306e122e427feff7d59e03ecd22c69a89452367"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:08d4c01d76da88cfe3d7d03b311b375ce3fb5a59130f93f0637bb755d6e56ff1"}, + {file = 
"pyreqwest_impersonate-0.4.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6524e276bc460176c79d7ba4b9131d9db73c534586660371ebdf067749252a33"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a22863bc0aaf02ca2f5d76c8130929ae680b7d82dfc1c28c1ed5f306ff626928"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8cc82d57f6a91037e64a7aa9122f909576ef2a141a42ce599958ef9f8c4bc033"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:da8a053308210e44fd8349f07f45442a0691ac932f2881e98b05cf9ac404b091"}, + {file = "pyreqwest_impersonate-0.4.8-cp311-none-win_amd64.whl", hash = "sha256:4baf3916c14364a815a64ead7f728afb61b37541933b2771f18dbb245029bb55"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:78db05deed0b32c9c75f2b3168a3a9b7d5e36487b218cb839bfe7e2a143450cb"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9af9446d605903c2b4e94621a9093f8d8a403729bc9cbfbcb62929f8238c838f"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c55890181d8d81e66cac25a95e215dc9680645d01e9091b64449d5407ad9bc6"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e69344e7ae9964502a8693da7ad77ebc3e1418ee197e2e394bc23c5d4970772a"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b5db5c957a10d8cc2815085ba0b8fe09245b2f94c2225d9653a854a03b4217e1"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:03c19c21f63f9c91c590c4bbcc32cc2d8066b508c683a1d163b8c7d9816a01d5"}, + {file = "pyreqwest_impersonate-0.4.8-cp312-none-win_amd64.whl", hash = "sha256:0230610779129f74ff802c744643ce7589b1d07cba21d046fe3b574281c29581"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b8cb9471ab4b2fa7e80d3ac4e580249ff988d782f2938ad1f0428433652b170d"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8081a5ace2658be91519902bde9ddc5f94e1f850a39be196007a25e3da5bbfdc"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69eababfa3200459276acd780a0f3eaf41d1fe7c02bd169e714cba422055b5b9"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:632957fa671ebb841166e40913015de457225cb73600ef250c436c280e68bf45"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2ce7ddef334b4e5c68f5ea1da1d65f686b8d84f4443059d128e0f069d3fa499a"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6ce333d450b158d582e36317089a006440b4e66739a8e8849d170e4cb15e8c8d"}, + {file = "pyreqwest_impersonate-0.4.8-cp38-none-win_amd64.whl", hash = "sha256:9d9c85ce19db92362854f534807e470f03e905f283a7de6826dc79b790a8788e"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2503277f2a95a30e28e498570e2ed03ef4302f873054e8e21d6c0e607cbbc1d1"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8260395ef4ddae325e8b30cef0391adde7bd35e1a1decf8c729e26391f09b52d"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6d8066b46d82bbaff5402d767e2f13d3449b8191c37bf8283e91d301a7159869"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9c42f6343cfbd6663fb53edc9eb9feb4ebf6186b284e22368adc1eeb6a33854"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ff534f491a059e74fb7f994876df86078b4b125dbecc53c098a298ecd55fa9c6"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5b8fbf73b3ac513ddadafd338d61f79cd2370f0691d9175b2b92a45920920d6b"}, + {file = "pyreqwest_impersonate-0.4.8-cp39-none-win_amd64.whl", hash = "sha256:a26447c82665d0e361207c1a15e56b0ca54974aa6c1fdfa18c68f908dec78cbe"}, + {file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24a16b8d55309f0af0db9d04ff442b0c91afccf078a94809e7c3a71747a5c214"}, + {file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c8fada56465fc19179404cc9d5d5e1064f5dfe27405cb052f57a5b4fe06aed1"}, + {file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a3d48d5abc146fd804395713427d944757a99254350e6a651e7d776818074aee"}, + {file = "pyreqwest_impersonate-0.4.8-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:475829fe9994c66258157a8d4adb1c038f44f79f901208ba656d547842337227"}, + {file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ef1ec0e97623bc0e18469418cc4dd2c59a2d5fddcae944de61e13c0b46f910e"}, + {file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91857b196de89e9b36d3f8629aa8772c0bbe7efef8334fe266956b1c192ec31c"}, + {file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:63831e407487b8a21bb51f97cd86a616c291d5138f8caec16ab6019cf6423935"}, + {file = "pyreqwest_impersonate-0.4.8-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c30e61de93bcd0a9d3ca226b1ae5475002afde61e9d85018a6a4a040eeb86567"}, + {file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6c72c37b03bce9900f5dbb4f476af17253ec60c13bf7a7259f71a8dc1b036cb"}, + {file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b1f1096165741b5c2178ab15b0eb09b5de16dd39b1cc135767d72471f0a69ce"}, + {file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:70c940c0e4ef335e22a6c705b01f286ee44780b5909065d212d94d82ea2580cb"}, + {file = "pyreqwest_impersonate-0.4.8-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:81c06f21757602d85f16dbc1cbaee1121cd65455f65aed4c048b7dcda7be85c4"}, + {file = "pyreqwest_impersonate-0.4.8.tar.gz", hash = "sha256:1eba11d47bd17244c64fec1502cc26ee66cc5c8a3be131e408101ae2b455e5bc"}, +] + +[package.extras] +dev = ["pytest (>=8.1.1)"] + [[package]] name = "pyright" version = "1.1.366" @@ -6830,4 +6862,4 @@ benchmark = ["agbenchmark"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "e04a03e3f2663d3e54d5f4e6649cd4442cb00fd17ae5c68d06dbaadfc02ca309" +content-hash = "b02486b166870b64f778f117708c9dab9ce9305340ad5ef28994273925f09c4c" diff --git a/forge/pyproject.toml b/forge/pyproject.toml index 7b4ab1e09ce1..60461e380de2 100644 --- a/forge/pyproject.toml +++ b/forge/pyproject.toml @@ -22,7 +22,7 @@ click = "*" 
colorama = "^0.4.6" demjson3 = "^3.0.0" docker = "*" -duckduckgo-search = "^5.0.0" +duckduckgo-search = "^6.1.7" fastapi = "^0.109.1" gitpython = "^3.1.32" google-api-python-client = "*" From f04ddceacfcd830f538a0faa7dd22169f4d3930c Mon Sep 17 00:00:00 2001 From: Nicholas Tindle Date: Tue, 25 Jun 2024 19:03:05 -0500 Subject: [PATCH 04/23] docs(forge): Update and rename QUICKSTART.md to FORGE-QUICKSTART.md (#7215) --- QUICKSTART.md => FORGE-QUICKSTART.md | 32 ++++++++++++++-------------- 1 file changed, 16 insertions(+), 16 deletions(-) rename QUICKSTART.md => FORGE-QUICKSTART.md (71%) diff --git a/QUICKSTART.md b/FORGE-QUICKSTART.md similarity index 71% rename from QUICKSTART.md rename to FORGE-QUICKSTART.md index b72ba4994e57..9bedd7abee97 100644 --- a/QUICKSTART.md +++ b/FORGE-QUICKSTART.md @@ -2,11 +2,11 @@ > For the complete getting started [tutorial series](https://aiedge.medium.com/autogpt-forge-e3de53cc58ec) <- click here -Welcome to the Quickstart Guide! This guide will walk you through the process of setting up and running your own AutoGPT agent. Whether you're a seasoned AI developer or just starting out, this guide will provide you with the necessary steps to jumpstart your journey in the world of AI development with AutoGPT. +Welcome to the Quickstart Guide! This guide will walk you through setting up, building, and running your own AutoGPT agent. Whether you're a seasoned AI developer or just starting out, this guide will provide you with the steps to jumpstart your journey in AI development with AutoGPT. ## System Requirements -This project supports Linux (Debian based), Mac, and Windows Subsystem for Linux (WSL). If you are using a Windows system, you will need to install WSL. You can find the installation instructions for WSL [here](https://learn.microsoft.com/en-us/windows/wsl/). +This project supports Linux (Debian-based), Mac, and Windows Subsystem for Linux (WSL). If you use a Windows system, you must install WSL. You can find the installation instructions for WSL [here](https://learn.microsoft.com/en-us/windows/wsl/). ## Getting Setup @@ -18,11 +18,11 @@ This project supports Linux (Debian based), Mac, and Windows Subsystem for Linux - In the top-right corner of the page, click Fork. ![Create Fork UI](docs/content/imgs/quickstart/002_fork.png) - - On the next page, select your GitHub account to create the fork under. + - On the next page, select your GitHub account to create the fork. - Wait for the forking process to complete. You now have a copy of the repository in your GitHub account. 2. **Clone the Repository** - To clone the repository, you need to have Git installed on your system. If you don't have Git installed, you can download it from [here](https://git-scm.com/downloads). Once you have Git installed, follow these steps: + To clone the repository, you need to have Git installed on your system. If you don't have Git installed, download it from [here](https://git-scm.com/downloads). Once you have Git installed, follow these steps: - Open your terminal. - Navigate to the directory where you want to clone the repository. - Run the git clone command for the fork you just created @@ -34,11 +34,11 @@ This project supports Linux (Debian based), Mac, and Windows Subsystem for Linux ![Open the Project in your IDE](docs/content/imgs/quickstart/004_ide.png) 4. **Setup the Project** - Next we need to setup the required dependencies. We have a tool for helping you do all the tasks you need to on the repo. + Next, we need to set up the required dependencies. 
We have a tool to help you perform all the tasks on the repo.
   It can be accessed by typing `./run` in the terminal.
 
-  The first command you need to use is `./run setup` This will guide you through the process of setting up your system.
-  Initially you will get instructions for installing flutter, chrome and setting up your github access token like the following image:
+  The first command you need to use is `./run setup`. This will guide you through setting up your system.
+  Initially, you will get instructions for installing Flutter and Chrome and setting up your GitHub access token like the following image:
 
   ![Setup the Project](docs/content/imgs/quickstart/005_setup.png)
 
@@ -47,7 +47,7 @@
 If you're a Windows user and experience issues after installing WSL, follow the steps below to resolve them.
 
 #### Update WSL
-Run the following command in Powershell or Command Prompt to:
+Run the following command in PowerShell or Command Prompt:
 1. Enable the optional WSL and Virtual Machine Platform components.
 2. Download and install the latest Linux kernel.
 3. Set WSL 2 as the default.
@@ -73,7 +73,7 @@ dos2unix ./run
 After executing the above commands, running `./run setup` should work successfully.
 
 #### Store Project Files within the WSL File System
-If you continue to experience issues, consider storing your project files within the WSL file system instead of the Windows file system. This method avoids issues related to path translations and permissions and provides a more consistent development environment.
+If you continue to experience issues, consider storing your project files within the WSL file system instead of the Windows file system. This method avoids path-translation and permission issues and provides a more consistent development environment.
 
 You can keep running the command to get feedback on where you are up to with your setup. 
 When setup has been completed, the command will return an output like this:
 
 ## Creating Your Agent
 
 After completing the setup, the next step is to create your agent template. 
-Execute the command `./run agent create YOUR_AGENT_NAME`, where `YOUR_AGENT_NAME` should be replaced with a name of your choosing. 
+Execute the command `./run agent create YOUR_AGENT_NAME`, where `YOUR_AGENT_NAME` should be replaced with your chosen name.
 
 Tips for naming your agent:
 * Give it its own unique name, or name it after yourself
 
 This starts the agent on the URL: `http://localhost:8000/`
 
 ![Start the Agent](docs/content/imgs/quickstart/009_start_agent.png)
 
-The frontend can be accessed from `http://localhost:8000/`, you will first need to login using either a google account or your github account.
+The front end can be accessed from `http://localhost:8000/`; first, you must log in using either a Google account or your GitHub account.
 
 ![Login](docs/content/imgs/quickstart/010_login.png)
 
-Upon logging in you will get a page that looks something like this. With your task history down the left hand side of the page and the 'chat' window to send tasks to your agent.
+Upon logging in, you will get a page that looks something like this, with your task history down the left-hand side of the page and the 'chat' window to send tasks to your agent.
![Login](docs/content/imgs/quickstart/011_home.png)
 
-When you have finished with your agent, or if you just need to restart it, use Ctl-C to end the session then you can re-run the start command.
+When you have finished with your agent or just need to restart it, use Ctrl-C to end the session. Then, you can re-run the start command.
 
-If you are having issues and want to ensure the agent has been stopped there is a `./run agent stop` command which will kill the process using port 8000, which should be the agent.
+If you are having issues and want to ensure the agent has been stopped, there is a `./run agent stop` command, which will kill the process using port 8000, which should be the agent.
 
 ## Benchmarking your Agent
 
-The benchmarking system can also be accessed using the cli too:
+The benchmarking system can also be accessed using the CLI:
 
 ```bash
 agpt % ./run benchmark
 
@@ -163,7 +163,7 @@ The benchmark has been split into different categories of skills you can test yo
 
 ![Login](docs/content/imgs/quickstart/012_tests.png)
 
-Finally you can run the benchmark with
+Finally, you can run the benchmark with
 
 ```bash
 ./run benchmark start YOUR_AGENT_NAME
 
From 26bcb26bb7121f071b7aa3af0509063b74fa90fa Mon Sep 17 00:00:00 2001
From: Zamil Majdy
Date: Wed, 26 Jun 2024 14:41:55 +0400
Subject: [PATCH 05/23] feat(rnd): Refactor AgentServer Node Input/Output Relation & Block output interface (#7231)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

### Background

The current implementation of AgentServer doesn't allow a single pin to be connected to multiple nodes. This is problematic when a single output node needs to propagate its output into many nodes, or when multiple nodes feed data into a single pin (first come, first served).

This infra change is also part of the preparation for changing the `block` interface to return a stream of outputs instead of a single output. Treating blocks as streams requires this capability.

### Changes 🏗️

* Updated the block run interface from returning `(output_name, output_data)` to `Generator[(output_name, output_data)]` (see the sketch below).
* Removed the `agent` term from the API, replacing it with `graph` for consistency.
* Reintroduced `AgentNodeExecutionInputOutput`. `AgentNodeExecution` input & output are now lists of `AgentNodeExecutionInputOutput` records describing the execution's input & output data, giving an execution a 1-to-many relation to its input/output data.
* Propagated the relation and block interface changes into the execution engine.
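To make the new contract concrete, here is a minimal, hypothetical sketch (not part of the commit) of a block written against the streaming interface. The `BlockOutput` alias mirrors the one introduced in `block.py` below; `WordStatsBlock` and its output pins are invented purely for illustration.

```python
from typing import Any, Generator

# Mirrors the `BlockOutput` alias introduced in block.py below:
# a block no longer returns one (name, data) tuple, it yields many.
BlockOutput = Generator[tuple[str, Any], None, None]

class WordStatsBlock:  # hypothetical block, for illustration only
    def run(self, input_data: dict[str, Any]) -> BlockOutput:
        words = input_data["text"].split()
        # Each yielded pair targets one output pin; the executor can
        # fan each pin out to multiple connected nodes as it arrives.
        yield "word_count", len(words)
        yield "longest_word", max(words, key=len)

# A consumer (here, a stand-in for the execution engine) reads the
# stream incrementally instead of waiting for a single return value.
for name, data in WordStatsBlock().run({"text": "run blocks as streams"}):
    print(f"output pin {name!r} -> {data!r}")
```

Because each `(output_name, output_data)` pair is yielded as soon as it is ready, the executor can enqueue downstream nodes per output pin rather than after the block finishes, which is exactly what the 1-to-many input/output relation below is built to record.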
--- .../autogpt_server/data/block.py | 45 ++-- .../autogpt_server/data/execution.py | 234 +++++++++++++----- .../autogpt_server/data/graph.py | 74 +++--- .../autogpt_server/data/schedule.py | 28 +-- .../autogpt_server/executor/manager.py | 212 ++++++++++------ .../autogpt_server/executor/scheduler.py | 23 +- .../autogpt_server/server/server.py | 96 +++---- .../autogpt_server/util/service.py | 7 +- rnd/autogpt_server/schema.prisma | 71 +++--- .../test/executor/test_manager.py | 37 ++- 10 files changed, 494 insertions(+), 333 deletions(-) diff --git a/rnd/autogpt_server/autogpt_server/data/block.py b/rnd/autogpt_server/autogpt_server/data/block.py index 4dbfcf354f71..d0f69fdc0e0b 100644 --- a/rnd/autogpt_server/autogpt_server/data/block.py +++ b/rnd/autogpt_server/autogpt_server/data/block.py @@ -1,9 +1,8 @@ import json -import jsonschema - from abc import ABC, abstractmethod -from typing import Any, ClassVar +from typing import Any, Generator, ClassVar +import jsonschema from prisma.models import AgentBlock from pydantic import BaseModel @@ -92,6 +91,12 @@ def validate_field(self, field_name: str, data: BlockData) -> str | None: except jsonschema.ValidationError as e: return str(e) + def get_fields(self) -> set[str]: + return set(self.jsonschema["properties"].keys()) + + +BlockOutput = Generator[tuple[str, Any], None, None] + class Block(ABC, BaseModel): @classmethod @@ -126,13 +131,15 @@ def output_schema(cls) -> BlockSchema: pass @abstractmethod - def run(self, input_data: BlockData) -> tuple[str, Any]: + def run(self, input_data: BlockData) -> BlockOutput: """ Run the block with the given input data. Args: input_data: The input data with the structure of input_schema. Returns: - The (output name, output data), matching the type in output_schema. + A Generator that yields (output_name, output_data). + output_name: One of the output name defined in Block's output_schema. + output_data: The data for the output_name, matching the defined schema. 
""" pass @@ -149,20 +156,18 @@ def to_dict(self): "outputSchema": self.output_schema.jsonschema, } - def execute(self, input_data: BlockData) -> tuple[str, Any]: + def execute(self, input_data: BlockData) -> BlockOutput: if error := self.input_schema.validate_data(input_data): raise ValueError( f"Unable to execute block with invalid input data: {error}" ) - output_name, output_data = self.run(input_data) - - if error := self.output_schema.validate_field(output_name, output_data): - raise ValueError( - f"Unable to execute block with invalid output data: {error}" - ) - - return output_name, output_data + for output_name, output_data in self.run(input_data): + if error := self.output_schema.validate_field(output_name, output_data): + raise ValueError( + f"Unable to execute block with invalid output data: {error}" + ) + yield output_name, output_data # ===================== Inline-Block Implementations ===================== # @@ -181,8 +186,8 @@ class ParrotBlock(Block): } ) - def run(self, input_data: BlockData) -> tuple[str, Any]: - return "output", input_data["input"] + def run(self, input_data: BlockData) -> BlockOutput: + yield "output", input_data["input"] class TextCombinerBlock(Block): @@ -200,8 +205,8 @@ class TextCombinerBlock(Block): } ) - def run(self, input_data: BlockData) -> tuple[str, Any]: - return "combined_text", input_data["format"].format( + def run(self, input_data: BlockData) -> BlockOutput: + yield "combined_text", input_data["format"].format( text1=input_data["text1"], text2=input_data["text2"], ) @@ -220,8 +225,8 @@ class PrintingBlock(Block): } ) - def run(self, input_data: BlockData) -> tuple[str, Any]: - return "status", "printed" + def run(self, input_data: BlockData) -> BlockOutput: + yield "status", "printed" # ======================= Block Helper Functions ======================= # diff --git a/rnd/autogpt_server/autogpt_server/data/execution.py b/rnd/autogpt_server/autogpt_server/data/execution.py index 77ae0e931288..2fb6a7a03ea4 100644 --- a/rnd/autogpt_server/autogpt_server/data/execution.py +++ b/rnd/autogpt_server/autogpt_server/data/execution.py @@ -1,23 +1,27 @@ import json +from collections import defaultdict from datetime import datetime from enum import Enum -from multiprocessing import Queue +from multiprocessing import Manager from typing import Any -from prisma.models import AgentNodeExecution +from prisma.models import ( + AgentGraphExecution, + AgentNodeExecution, + AgentNodeExecutionInputOutput, +) +from pydantic import BaseModel -from autogpt_server.data.db import BaseDbModel - -class Execution(BaseDbModel): - """Data model for an execution of an Agent""" - - run_id: str +class NodeExecution(BaseModel): + graph_exec_id: str + node_exec_id: str node_id: str data: dict[str, Any] class ExecutionStatus(str, Enum): + INCOMPLETE = "INCOMPLETE" QUEUED = "QUEUED" RUNNING = "RUNNING" COMPLETED = "COMPLETED" @@ -31,103 +35,205 @@ class ExecutionQueue: """ def __init__(self): - self.queue: Queue[Execution] = Queue() + self.queue = Manager().Queue() - def add(self, execution: Execution) -> Execution: + def add(self, execution: NodeExecution) -> NodeExecution: self.queue.put(execution) return execution - def get(self) -> Execution: + def get(self) -> NodeExecution: return self.queue.get() def empty(self) -> bool: return self.queue.empty() -class ExecutionResult(BaseDbModel): - run_id: str - execution_id: str +class ExecutionResult(BaseModel): + graph_exec_id: str + node_exec_id: str node_id: str status: ExecutionStatus - input_data: dict[str, Any] - 
output_name: str - output_data: Any - creation_time: datetime + input_data: dict[str, Any] # 1 input pin should consume exactly 1 data. + output_data: dict[str, list[Any]] # but 1 output pin can produce multiple output. + add_time: datetime + queue_time: datetime | None start_time: datetime | None end_time: datetime | None @staticmethod def from_db(execution: AgentNodeExecution): + input_data = defaultdict() + for data in execution.Input or []: + input_data[data.name] = json.loads(data.data) + + output_data = defaultdict(list) + for data in execution.Output or []: + output_data[data.name].append(json.loads(data.data)) + return ExecutionResult( - run_id=execution.executionId, + graph_exec_id=execution.agentGraphExecutionId, + node_exec_id=execution.id, node_id=execution.agentNodeId, - execution_id=execution.id, status=ExecutionStatus(execution.executionStatus), - input_data=json.loads(execution.inputData or "{}"), - output_name=execution.outputName or "", - output_data=json.loads(execution.outputData or "{}"), - creation_time=execution.creationTime, - start_time=execution.startTime, - end_time=execution.endTime, + input_data=input_data, + output_data=output_data, + add_time=execution.addedTime, + queue_time=execution.queuedTime, + start_time=execution.startedTime, + end_time=execution.endedTime, ) # --------------------- Model functions --------------------- # - -async def enqueue_execution(execution: Execution) -> None: - await AgentNodeExecution.prisma().create( +async def create_graph_execution( + graph_id: str, + node_ids: list[str], + data: dict[str, Any] +) -> tuple[str, list[ExecutionResult]]: + """ + Create a new AgentGraphExecution record. + Returns: + The id of the AgentGraphExecution and the list of ExecutionResult for each node. + """ + result = await AgentGraphExecution.prisma().create( data={ - "id": execution.id, - "executionId": execution.run_id, - "agentNodeId": execution.node_id, - "executionStatus": ExecutionStatus.QUEUED, - "inputData": json.dumps(execution.data), - "creationTime": datetime.now(), - } + "agentGraphId": graph_id, + "AgentNodeExecutions": { + "create": [ # type: ignore + { + "agentNodeId": node_id, + "executionStatus": ExecutionStatus.INCOMPLETE, + "Input": { + "create": [ + {"name": name, "data": json.dumps(data)} + for name, data in data.items() + ] + }, + } + for node_id in node_ids + ] + }, + }, + include={"AgentNodeExecutions": True} ) + return result.id, [ + ExecutionResult.from_db(execution) + for execution in result.AgentNodeExecutions or [] + ] -async def start_execution(exec_id: str) -> None: - await AgentNodeExecution.prisma().update( - where={"id": exec_id}, - data={ - "executionStatus": ExecutionStatus.RUNNING, - "startTime": datetime.now(), + +async def upsert_execution_input( + node_id: str, + graph_exec_id: str, + input_name: str, + data: Any, +) -> str: + """ + Insert AgentNodeExecutionInputOutput record for as one of AgentNodeExecution.Input. + If there is no AgentNodeExecution that has no `input_name` as input, create new one. + + Returns: + The id of the created or existing AgentNodeExecution. 
+ """ + existing_execution = await AgentNodeExecution.prisma().find_first( + where={ # type: ignore + "agentNodeId": node_id, + "agentGraphExecutionId": graph_exec_id, + "Input": {"every": {"name": {"not": input_name}}}, }, + order={"addedTime": "asc"}, ) + json_data = json.dumps(data) + + if existing_execution: + print(f"Adding input {input_name}={data} to execution #{existing_execution.id}") + await AgentNodeExecutionInputOutput.prisma().create( + data={ + "name": input_name, + "data": json_data, + "referencedByInputExecId": existing_execution.id, + } + ) + return existing_execution.id + + else: + print(f"Creating new execution for input {input_name}={data}") + result = await AgentNodeExecution.prisma().create( + data={ + "agentNodeId": node_id, + "agentGraphExecutionId": graph_exec_id, + "executionStatus": ExecutionStatus.INCOMPLETE, + "Input": {"create": {"name": input_name, "data": json_data}}, + } + ) + return result.id -async def complete_execution(exec_id: str, output: tuple[str, Any]) -> None: - output_name, output_data = output - - await AgentNodeExecution.prisma().update( - where={"id": exec_id}, +async def upsert_execution_output( + node_exec_id: str, + output_name: str, + output_data: Any, +) -> None: + """ + Insert AgentNodeExecutionInputOutput record for as one of AgentNodeExecution.Output. + """ + await AgentNodeExecutionInputOutput.prisma().create( data={ - "executionStatus": ExecutionStatus.COMPLETED, - "outputName": output_name, - "outputData": json.dumps(output_data), - "endTime": datetime.now(), - }, + "name": output_name, + "data": json.dumps(output_data), + "referencedByOutputExecId": node_exec_id, + } ) -async def fail_execution(exec_id: str, error: Exception) -> None: - await AgentNodeExecution.prisma().update( - where={"id": exec_id}, - data={ - "executionStatus": ExecutionStatus.FAILED, - "outputName": "error", - "outputData": str(error), - "endTime": datetime.now(), - }, +async def update_execution_status(node_exec_id: str, status: ExecutionStatus) -> None: + now = datetime.now() + data = { + **({"executionStatus": status}), + **({"queuedTime": now} if status == ExecutionStatus.QUEUED else {}), + **({"startedTime": now} if status == ExecutionStatus.RUNNING else {}), + **({"endedTime": now} if status == ExecutionStatus.FAILED else {}), + **({"endedTime": now} if status == ExecutionStatus.COMPLETED else {}), + } + + count = await AgentNodeExecution.prisma().update( + where={"id": node_exec_id}, + data=data # type: ignore ) + if count == 0: + raise ValueError(f"Execution {node_exec_id} not found.") -async def get_executions(run_id: str) -> list[ExecutionResult]: +async def get_executions(graph_exec_id: str) -> list[ExecutionResult]: executions = await AgentNodeExecution.prisma().find_many( - where={"executionId": run_id}, - order={"startTime": "asc"}, + where={"agentGraphExecutionId": graph_exec_id}, + include={"Input": True, "Output": True}, + order={"addedTime": "asc"}, ) res = [ExecutionResult.from_db(execution) for execution in executions] return res + + +async def get_node_execution_input(node_exec_id: str) -> dict[str, Any]: + """ + Get execution node input data from the previous node execution result. + + Returns: + dictionary of input data, key is the input name, value is the input data. 
+ """ + execution = await AgentNodeExecution.prisma().find_unique_or_raise( + where={"id": node_exec_id}, + include={ + "Input": True, + "AgentNode": True, + }, + ) + if not execution.AgentNode: + raise ValueError(f"Node {execution.agentNodeId} not found.") + + exec_input = json.loads(execution.AgentNode.constantInput) + for input_data in execution.Input or []: + exec_input[input_data.name] = json.loads(input_data.data) + return exec_input diff --git a/rnd/autogpt_server/autogpt_server/data/graph.py b/rnd/autogpt_server/autogpt_server/data/graph.py index 103da14d6d88..8ee9f3035060 100644 --- a/rnd/autogpt_server/autogpt_server/data/graph.py +++ b/rnd/autogpt_server/autogpt_server/data/graph.py @@ -1,20 +1,30 @@ import asyncio import json import uuid - from typing import Any -from prisma.models import AgentGraph, AgentNode, AgentNodeExecution, AgentNodeLink + +from prisma.models import AgentGraph, AgentNode, AgentNodeLink +from pydantic import BaseModel from autogpt_server.data.db import BaseDbModel +class Link(BaseModel): + name: str + node_id: str + + def __init__(self, name: str, node_id: str): + super().__init__(name=name, node_id=node_id) + + def __iter__(self): + return iter((self.name, self.node_id)) + + class Node(BaseDbModel): block_id: str input_default: dict[str, Any] = {} # dict[input_name, default_value] - input_nodes: dict[str, str] = {} # dict[input_name, node_id] - # TODO: Make it `dict[str, list[str]]`, output can be connected to multiple blocks. - # Other option is to use an edge-list, but it will complicate the rest code. - output_nodes: dict[str, str] = {} # dict[output_name, node_id] + input_nodes: list[Link] = [] # dict[input_name, node_id] + output_nodes: list[Link] = [] # dict[output_name, node_id] metadata: dict[str, Any] = {} @staticmethod @@ -26,14 +36,20 @@ def from_db(node: AgentNode): id=node.id, block_id=node.AgentBlock.id, input_default=json.loads(node.constantInput), - input_nodes={v.sinkName: v.agentNodeSourceId for v in node.Input or []}, - output_nodes={v.sourceName: v.agentNodeSinkId for v in node.Output or []}, + input_nodes=[ + Link(v.sinkName, v.agentNodeSourceId) + for v in node.Input or [] + ], + output_nodes=[ + Link(v.sourceName, v.agentNodeSinkId) + for v in node.Output or [] + ], metadata=json.loads(node.metadata), ) def connect(self, node: "Node", source_name: str, sink_name: str): - self.output_nodes[source_name] = node.id - node.input_nodes[sink_name] = self.id + self.output_nodes.append(Link(source_name, node.id)) + node.input_nodes.append(Link(sink_name, self.id)) class Graph(BaseDbModel): @@ -85,41 +101,7 @@ async def get_graph(graph_id: str) -> Graph | None: return Graph.from_db(graph) if graph else None -async def get_node_input(node: Node, exec_id: str) -> dict[str, Any]: - """ - Get execution node input data from the previous node execution result. - Args: - node: The execution node. - exec_id: The execution ID. - Returns: - dictionary of input data, key is the input name, value is the input data. 
- """ - query = await AgentNodeExecution.prisma().find_many( - where={ # type: ignore - "executionId": exec_id, - "agentNodeId": {"in": list(node.input_nodes.values())}, - "executionStatus": "COMPLETED", - }, - distinct=["agentNodeId"], # type: ignore - order={"creationTime": "desc"}, - ) - - latest_executions: dict[str, AgentNodeExecution] = { - execution.agentNodeId: execution for execution in query - } - - return { - **node.input_default, - **{ - name: json.loads(latest_executions[node_id].outputData or "{}") - for name, node_id in node.input_nodes.items() - if node_id in latest_executions and latest_executions[node_id].outputData - }, - } - - async def create_graph(graph: Graph) -> Graph: - await AgentGraph.prisma().create( data={ "id": graph.id, @@ -142,12 +124,12 @@ async def create_graph(graph: Graph) -> Graph: edge_source_names = { (source_node.id, sink_node_id): output_name for source_node in graph.nodes - for output_name, sink_node_id in source_node.output_nodes.items() + for output_name, sink_node_id in source_node.output_nodes } edge_sink_names = { (source_node_id, sink_node.id): input_name for sink_node in graph.nodes - for input_name, source_node_id in sink_node.input_nodes.items() + for input_name, source_node_id in sink_node.input_nodes } # TODO: replace bulk creation using create_many diff --git a/rnd/autogpt_server/autogpt_server/data/schedule.py b/rnd/autogpt_server/autogpt_server/data/schedule.py index d657300b8a64..b1a344ff9fb0 100644 --- a/rnd/autogpt_server/autogpt_server/data/schedule.py +++ b/rnd/autogpt_server/autogpt_server/data/schedule.py @@ -2,14 +2,13 @@ from datetime import datetime from typing import Optional, Any -from prisma.models import AgentExecutionSchedule +from prisma.models import AgentGraphExecutionSchedule from autogpt_server.data.db import BaseDbModel class ExecutionSchedule(BaseDbModel): - id: str - agent_id: str + graph_id: str schedule: str is_enabled: bool input_data: dict[str, Any] @@ -25,10 +24,10 @@ def __init__( super().__init__(is_enabled=is_enabled, **kwargs) @staticmethod - def from_db(schedule: AgentExecutionSchedule): + def from_db(schedule: AgentGraphExecutionSchedule): return ExecutionSchedule( id=schedule.id, - agent_id=schedule.agentGraphId, + graph_id=schedule.agentGraphId, schedule=schedule.schedule, is_enabled=schedule.isEnabled, last_updated=schedule.lastUpdated.replace(tzinfo=None), @@ -37,7 +36,7 @@ def from_db(schedule: AgentExecutionSchedule): async def get_active_schedules(last_fetch_time: datetime) -> list[ExecutionSchedule]: - query = AgentExecutionSchedule.prisma().find_many( + query = AgentGraphExecutionSchedule.prisma().find_many( where={ "isEnabled": True, "lastUpdated": {"gt": last_fetch_time} @@ -51,17 +50,17 @@ async def get_active_schedules(last_fetch_time: datetime) -> list[ExecutionSched async def disable_schedule(schedule_id: str): - await AgentExecutionSchedule.prisma().update( + await AgentGraphExecutionSchedule.prisma().update( where={"id": schedule_id}, data={"isEnabled": False} ) -async def get_schedules(agent_id: str) -> list[ExecutionSchedule]: - query = AgentExecutionSchedule.prisma().find_many( +async def get_schedules(graph_id: str) -> list[ExecutionSchedule]: + query = AgentGraphExecutionSchedule.prisma().find_many( where={ "isEnabled": True, - "agentGraphId": agent_id, + "agentGraphId": graph_id, }, ) return [ @@ -70,20 +69,21 @@ async def get_schedules(agent_id: str) -> list[ExecutionSchedule]: ] -async def add_schedule(schedule: ExecutionSchedule): - await 
AgentExecutionSchedule.prisma().create( +async def add_schedule(schedule: ExecutionSchedule) -> ExecutionSchedule: + obj = await AgentGraphExecutionSchedule.prisma().create( data={ "id": schedule.id, - "agentGraphId": schedule.agent_id, + "agentGraphId": schedule.graph_id, "schedule": schedule.schedule, "isEnabled": schedule.is_enabled, "inputData": json.dumps(schedule.input_data), } ) + return ExecutionSchedule.from_db(obj) async def update_schedule(schedule_id: str, is_enabled: bool): - await AgentExecutionSchedule.prisma().update( + await AgentGraphExecutionSchedule.prisma().update( where={"id": schedule_id}, data={"isEnabled": is_enabled} ) diff --git a/rnd/autogpt_server/autogpt_server/executor/manager.py b/rnd/autogpt_server/autogpt_server/executor/manager.py index 5296f0c8635b..7ebb375694f7 100644 --- a/rnd/autogpt_server/autogpt_server/executor/manager.py +++ b/rnd/autogpt_server/autogpt_server/executor/manager.py @@ -1,30 +1,35 @@ import asyncio import logging -import uuid from concurrent.futures import ProcessPoolExecutor -from typing import Optional, Any +from typing import Any, Coroutine, Generator, TypeVar from autogpt_server.data import db from autogpt_server.data.block import Block, get_block -from autogpt_server.data.graph import Node, get_node, get_node_input, get_graph from autogpt_server.data.execution import ( - Execution, + get_node_execution_input, + create_graph_execution, + update_execution_status as execution_update, + upsert_execution_output, + upsert_execution_input, + NodeExecution as Execution, + ExecutionStatus, ExecutionQueue, - enqueue_execution, - complete_execution, - fail_execution, - start_execution, ) +from autogpt_server.data.graph import Node, get_node, get_graph from autogpt_server.util.service import AppService, expose logger = logging.getLogger(__name__) -def get_log_prefix(run_id: str, exec_id: str, block_name: str = "-"): - return f"[ExecutionManager] [graph-{run_id}|node-{exec_id}|{block_name}]" +def get_log_prefix(graph_eid: str, node_eid: str, block_name: str = "-"): + return f"[ExecutionManager] [graph-{graph_eid}|node-{node_eid}|{block_name}]" -def execute_node(loop: asyncio.AbstractEventLoop, data: Execution) -> Execution | None: +T = TypeVar("T") +ExecutionStream = Generator[Execution, None, None] + + +def execute_node(loop: asyncio.AbstractEventLoop, data: Execution) -> ExecutionStream: """ Execute a node in the graph. This will trigger a block execution on a node, persist the execution result, and return the subsequent node to be executed. @@ -36,57 +41,102 @@ def execute_node(loop: asyncio.AbstractEventLoop, data: Execution) -> Execution Returns: The subsequent node to be enqueued, or None if there is no subsequent node. 
""" - run_id = data.run_id - exec_id = data.id + graph_exec_id = data.graph_exec_id + node_exec_id = data.node_exec_id exec_data = data.data node_id = data.node_id asyncio.set_event_loop(loop) - wait = lambda f: loop.run_until_complete(f) - node: Optional[Node] = wait(get_node(node_id)) + def wait(f: Coroutine[T, Any, T]) -> T: + return loop.run_until_complete(f) + + node = wait(get_node(node_id)) if not node: logger.error(f"Node {node_id} not found.") - return None + return - node_block: Optional[Block] = wait(get_block(node.block_id)) + node_block = wait(get_block(node.block_id)) if not node_block: logger.error(f"Block {node.block_id} not found.") - return None + return # Execute the node - prefix = get_log_prefix(run_id, exec_id, node_block.name) + prefix = get_log_prefix(graph_exec_id, node_exec_id, node_block.name) logger.warning(f"{prefix} execute with input:\n`{exec_data}`") - wait(start_execution(exec_id)) + wait(execution_update(node_exec_id, ExecutionStatus.RUNNING)) try: - output_name, output_data = node_block.execute(exec_data) - logger.warning(f"{prefix} executed with output [{output_name}]:`{output_data}`") - wait(complete_execution(exec_id, (output_name, output_data))) + for output_name, output_data in node_block.execute(exec_data): + logger.warning(f"{prefix} Executed, output [{output_name}]:`{output_data}`") + wait(execution_update(node_exec_id, ExecutionStatus.COMPLETED)) + wait(upsert_execution_output(node_exec_id, output_name, output_data)) + + for execution in enqueue_next_nodes( + loop, node, output_name, output_data, graph_exec_id + ): + yield execution except Exception as e: logger.exception(f"{prefix} failed with error: %s", e) - wait(fail_execution(exec_id, e)) + wait(execution_update(node_exec_id, ExecutionStatus.FAILED)) + wait(upsert_execution_output(node_exec_id, "error", str(e))) raise e - # Try to enqueue next eligible nodes - if output_name not in node.output_nodes: - logger.error(f"{prefix} Output [{output_name}] has no subsequent node.") - return None - next_node_id = node.output_nodes[output_name] - next_node: Optional[Node] = wait(get_node(next_node_id)) - if not next_node: - logger.error(f"{prefix} Error, next node {next_node_id} not found.") - return None +def enqueue_next_nodes( + loop: asyncio.AbstractEventLoop, + node: Node, + output_name: str, + output_data: Any, + graph_exec_id: str, +) -> list[Execution]: + def wait(f: Coroutine[T, Any, T]) -> T: + return loop.run_until_complete(f) - next_node_input: dict[str, Any] = wait(get_node_input(next_node, run_id)) - is_valid, validation_resp = wait(validate_exec(next_node, next_node_input)) - if not is_valid: - logger.warning(f"{prefix} Skipped {next_node_id}: {validation_resp}") - return None + prefix = get_log_prefix(graph_exec_id, node.id) + node_id = node.id - logger.warning(f"{prefix} Enqueue next node {next_node_id}-{validation_resp}") - return Execution(run_id=run_id, node_id=next_node_id, data=next_node_input) + # Try to enqueue next eligible nodes + next_node_ids = [nid for name, nid in node.output_nodes if name == output_name] + if not next_node_ids: + logger.error(f"{prefix} Output [{output_name}] has no subsequent node.") + return [] + + def validate_node_execution(next_node_id: str): + next_node = wait(get_node(next_node_id)) + if not next_node: + logger.error(f"{prefix} Error, next node {next_node_id} not found.") + return + + next_node_input_name = next( + name for name, nid in next_node.input_nodes if nid == node_id + ) + next_node_exec_id = wait(upsert_execution_input( + 
node_id=next_node_id, + graph_exec_id=graph_exec_id, + input_name=next_node_input_name, + data=output_data + )) + + next_node_input = wait(get_node_execution_input(next_node_exec_id)) + is_valid, validation_resp = wait(validate_exec(next_node, next_node_input)) + if not is_valid: + logger.warning(f"{prefix} Skipped {next_node_id}: {validation_resp}") + return + + logger.warning(f"{prefix} Enqueue next node {next_node_id}-{validation_resp}") + return Execution( + graph_exec_id=graph_exec_id, + node_exec_id=next_node_exec_id, + node_id=next_node_id, + data=next_node_input + ) + + executions = [] + for nid in next_node_ids: + if execution := validate_node_execution(nid): + executions.append(execution) + return executions async def validate_exec(node: Node, data: dict[str, Any]) -> tuple[bool, str]: @@ -105,10 +155,12 @@ async def validate_exec(node: Node, data: dict[str, Any]) -> tuple[bool, str]: if not node_block: return False, f"Block for {node.block_id} not found." - if not set(node.input_nodes).issubset(data): - return False, f"Input data missing: {set(node.input_nodes) - set(data)}" + input_fields = node_block.input_schema.get_fields() + if not input_fields.issubset(data): + return False, f"Input data missing: {input_fields - set(data)}" if error := node_block.input_schema.validate_data(data): + logger.error("Input value doesn't match schema: %s", error) return False, f"Input data doesn't match {node_block.name}: {error}" return True, node_block.name @@ -123,16 +175,16 @@ def on_executor_start(cls): cls.loop.run_until_complete(db.connect()) @classmethod - def on_start_execution(cls, data: Execution) -> Optional[Execution | None]: - """ - A synchronous version of `execute_node`, to be used in the ProcessPoolExecutor. - """ - prefix = get_log_prefix(data.run_id, data.id) + def on_start_execution(cls, q: ExecutionQueue, data: Execution) -> bool: + prefix = get_log_prefix(data.graph_exec_id, data.node_exec_id) try: logger.warning(f"{prefix} Start execution") - return execute_node(cls.loop, data) + for execution in execute_node(cls.loop, data): + q.add(execution) + return True except Exception as e: - logger.error(f"{prefix} Error: {e}") + logger.exception(f"{prefix} Error: {e}") + return False class ExecutionManager(AppService): @@ -142,59 +194,63 @@ def __init__(self, pool_size: int): self.queue = ExecutionQueue() def run_service(self): - def on_complete_execution(f: asyncio.Future[Execution | None]): - exception = f.exception() - if exception: - logger.exception("Error during execution!! %s", exception) - return exception - - execution = f.result() - if execution: - return self.add_node_execution(execution) - - return None - with ProcessPoolExecutor( max_workers=self.pool_size, initializer=Executor.on_executor_start, ) as executor: logger.warning(f"Execution manager started with {self.pool_size} workers.") while True: - future = executor.submit( + executor.submit( Executor.on_start_execution, - self.queue.get() + self.queue, + self.queue.get(), ) - future.add_done_callback(on_complete_execution) # type: ignore @expose def add_execution(self, graph_id: str, data: dict[str, Any]) -> dict: - run_id = str(uuid.uuid4()) - - agent = self.run_and_wait(get_graph(graph_id)) - if not agent: - raise Exception(f"Agent #{graph_id} not found.") + graph = self.run_and_wait(get_graph(graph_id)) + if not graph: + raise Exception(f"Graph #{graph_id} not found.") # Currently, there is no constraint on the number of root nodes in the graph. 
- for node in agent.starting_nodes: - valid, error = self.run_and_wait(validate_exec(node, data)) + for node in graph.starting_nodes: + input_data = {**node.input_default, **data} + valid, error = self.run_and_wait(validate_exec(node, input_data)) if not valid: raise Exception(error) + graph_exec_id, node_execs = self.run_and_wait(create_graph_execution( + graph_id=graph_id, + node_ids=[node.id for node in graph.starting_nodes], + data=data + )) + executions = [] - for node in agent.starting_nodes: - exec_id = self.add_node_execution( - Execution(run_id=run_id, node_id=node.id, data=data) + for node_exec in node_execs: + input_data = self.run_and_wait( + get_node_execution_input(node_exec.node_exec_id) + ) + self.add_node_execution( + Execution( + graph_exec_id=node_exec.graph_exec_id, + node_exec_id=node_exec.node_exec_id, + node_id=node_exec.node_id, + data=input_data, + ) ) executions.append({ - "exec_id": exec_id, - "node_id": node.id, + "id": node_exec.node_exec_id, + "node_id": node_exec.node_id, }) return { - "run_id": run_id, + "id": graph_exec_id, "executions": executions, } def add_node_execution(self, execution: Execution) -> Execution: - self.run_and_wait(enqueue_execution(execution)) + self.run_and_wait(execution_update( + execution.node_exec_id, + ExecutionStatus.QUEUED + )) return self.queue.add(execution) diff --git a/rnd/autogpt_server/autogpt_server/executor/scheduler.py b/rnd/autogpt_server/autogpt_server/executor/scheduler.py index a82619af7d34..c547d2d01231 100644 --- a/rnd/autogpt_server/autogpt_server/executor/scheduler.py +++ b/rnd/autogpt_server/autogpt_server/executor/scheduler.py @@ -45,20 +45,20 @@ def __refresh_jobs_from_db(self, scheduler: BackgroundScheduler): log(f"Adding recurring job {schedule.id}: {schedule.schedule}") scheduler.add_job( - self.__execute_agent, + self.__execute_graph, CronTrigger.from_crontab(schedule.schedule), id=schedule.id, - args=[schedule.agent_id, schedule.input_data], + args=[schedule.graph_id, schedule.input_data], replace_existing=True, ) - def __execute_agent(self, agent_id: str, input_data: dict): + def __execute_graph(self, graph_id: str, input_data: dict): try: - log(f"Executing recurring job for agent #{agent_id}") + log(f"Executing recurring job for graph #{graph_id}") execution_manager = self.execution_manager_client - execution_manager.add_execution(agent_id, input_data) + execution_manager.add_execution(graph_id, input_data) except Exception as e: - logger.error(f"Error executing agent {agent_id}: {e}") + logger.exception(f"Error executing graph {graph_id}: {e}") @expose def update_schedule(self, schedule_id: str, is_enabled: bool) -> str: @@ -66,17 +66,16 @@ def update_schedule(self, schedule_id: str, is_enabled: bool) -> str: return schedule_id @expose - def add_execution_schedule(self, agent_id: str, cron: str, input_data: dict) -> str: + def add_execution_schedule(self, graph_id: str, cron: str, input_data: dict) -> str: schedule = model.ExecutionSchedule( - agent_id=agent_id, + graph_id=graph_id, schedule=cron, input_data=input_data, ) - self.run_and_wait(model.add_schedule(schedule)) - return schedule.id + return self.run_and_wait(model.add_schedule(schedule)).id @expose - def get_execution_schedules(self, agent_id: str) -> dict[str, str]: - query = model.get_schedules(agent_id) + def get_execution_schedules(self, graph_id: str) -> dict[str, str]: + query = model.get_schedules(graph_id) schedules: list[model.ExecutionSchedule] = self.run_and_wait(query) return {v.id: v.schedule for v in schedules} diff --git 
a/rnd/autogpt_server/autogpt_server/server/server.py b/rnd/autogpt_server/autogpt_server/server/server.py index 7e787cf5b575..40a87c2e7691 100644 --- a/rnd/autogpt_server/autogpt_server/server/server.py +++ b/rnd/autogpt_server/autogpt_server/server/server.py @@ -4,7 +4,14 @@ from contextlib import asynccontextmanager from fastapi import APIRouter, FastAPI, HTTPException -from autogpt_server.data import db, execution, graph, block +from autogpt_server.data import db, execution, block +from autogpt_server.data.graph import ( + create_graph, + get_graph, + get_graph_ids, + Graph, + Link, +) from autogpt_server.executor import ExecutionManager, ExecutionScheduler from autogpt_server.util.process import AppProcess from autogpt_server.util.service import get_service_client @@ -34,46 +41,46 @@ def run(self): router = APIRouter() router.add_api_route( path="/blocks", - endpoint=self.get_agent_blocks, + endpoint=self.get_graph_blocks, methods=["GET"], ) router.add_api_route( - path="/agents", - endpoint=self.get_agents, + path="/graphs", + endpoint=self.get_graphs, methods=["GET"], ) router.add_api_route( - path="/agents/{agent_id}", - endpoint=self.get_agent, + path="/graphs/{graph_id}", + endpoint=self.get_graph, methods=["GET"], ) router.add_api_route( - path="/agents", - endpoint=self.create_agent, + path="/graphs", + endpoint=self.create_new_graph, methods=["POST"], ) router.add_api_route( - path="/agents/{agent_id}/execute", - endpoint=self.execute_agent, + path="/graphs/{graph_id}/execute", + endpoint=self.execute_graph, methods=["POST"], ) router.add_api_route( - path="/agents/{agent_id}/executions/{run_id}", + path="/graphs/{graph_id}/executions/{run_id}", endpoint=self.get_executions, methods=["GET"], ) router.add_api_route( - path="/agents/{agent_id}/schedules", - endpoint=self.schedule_agent, + path="/graphs/{graph_id}/schedules", + endpoint=self.create_schedule, methods=["POST"], ) router.add_api_route( - path="/agents/{agent_id}/schedules", + path="/graphs/{graph_id}/schedules", endpoint=self.get_execution_schedules, methods=["GET"], ) router.add_api_route( - path="/agents/schedules/{schedule_id}", + path="/graphs/schedules/{schedule_id}", endpoint=self.update_schedule, methods=["PUT"], ) @@ -89,52 +96,51 @@ def execution_manager_client(self) -> ExecutionManager: def execution_scheduler_client(self) -> ExecutionScheduler: return get_service_client(ExecutionScheduler) - async def get_agent_blocks(self) -> list[dict]: + async def get_graph_blocks(self) -> list[dict]: return [v.to_dict() for v in await block.get_blocks()] - async def get_agents(self) -> list[str]: - return await graph.get_graph_ids() + async def get_graphs(self) -> list[str]: + return await get_graph_ids() - async def get_agent(self, agent_id: str) -> graph.Graph: - agent = await graph.get_graph(agent_id) - if not agent: - raise HTTPException(status_code=404, detail=f"Agent #{agent_id} not found.") + async def get_graph(self, graph_id: str) -> Graph: + graph = await get_graph(graph_id) + if not graph: + raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.") + return graph - return agent - - async def create_agent(self, agent: graph.Graph) -> graph.Graph: - agent.id = str(uuid.uuid4()) - - id_map = {node.id: str(uuid.uuid4()) for node in agent.nodes} - for node in agent.nodes: + async def create_new_graph(self, graph: Graph) -> Graph: + # TODO: replace uuid generation here to DB generated uuids. 
+ graph.id = str(uuid.uuid4()) + id_map = {node.id: str(uuid.uuid4()) for node in graph.nodes} + for node in graph.nodes: node.id = id_map[node.id] - node.input_nodes = {k: id_map[v] for k, v in node.input_nodes.items()} - node.output_nodes = {k: id_map[v] for k, v in node.output_nodes.items()} + node.input_nodes = [Link(k, id_map[v]) for k, v in node.input_nodes] + node.output_nodes = [Link(k, id_map[v]) for k, v in node.output_nodes] - return await graph.create_graph(agent) + return await create_graph(graph) - async def execute_agent(self, agent_id: str, node_input: dict) -> dict: + async def execute_graph(self, graph_id: str, node_input: dict) -> dict: try: - return self.execution_manager_client.add_execution(agent_id, node_input) + return self.execution_manager_client.add_execution(graph_id, node_input) except Exception as e: msg = e.__str__().encode().decode('unicode_escape') raise HTTPException(status_code=400, detail=msg) async def get_executions( - self, agent_id: str, run_id: str) -> list[execution.ExecutionResult]: - agent = await graph.get_graph(agent_id) - if not agent: - raise HTTPException(status_code=404, detail=f"Agent #{agent_id} not found.") + self, graph_id: str, run_id: str) -> list[execution.ExecutionResult]: + graph = await get_graph(graph_id) + if not graph: + raise HTTPException(status_code=404, detail=f"Agent #{graph_id} not found.") return await execution.get_executions(run_id) - async def schedule_agent(self, agent_id: str, cron: str, input_data: dict) -> dict: - agent = await graph.get_graph(agent_id) - if not agent: - raise HTTPException(status_code=404, detail=f"Agent #{agent_id} not found.") + async def create_schedule(self, graph_id: str, cron: str, input_data: dict) -> dict: + graph = await get_graph(graph_id) + if not graph: + raise HTTPException(status_code=404, detail=f"Graph #{graph_id} not found.") execution_scheduler = self.execution_scheduler_client return { - "id": execution_scheduler.add_execution_schedule(agent_id, cron, input_data) + "id": execution_scheduler.add_execution_schedule(graph_id, cron, input_data) } def update_schedule(self, schedule_id: str, input_data: dict) -> dict: @@ -143,6 +149,6 @@ def update_schedule(self, schedule_id: str, input_data: dict) -> dict: execution_scheduler.update_schedule(schedule_id, is_enabled) return {"id": schedule_id} - def get_execution_schedules(self, agent_id: str) -> dict[str, str]: + def get_execution_schedules(self, graph_id: str) -> dict[str, str]: execution_scheduler = self.execution_scheduler_client - return execution_scheduler.get_execution_schedules(agent_id) + return execution_scheduler.get_execution_schedules(graph_id) diff --git a/rnd/autogpt_server/autogpt_server/util/service.py b/rnd/autogpt_server/autogpt_server/util/service.py index 5cfc3d220ff2..e954eb089c1a 100644 --- a/rnd/autogpt_server/autogpt_server/util/service.py +++ b/rnd/autogpt_server/autogpt_server/util/service.py @@ -15,6 +15,7 @@ logger = logging.getLogger(__name__) conn_retry = retry(stop=stop_after_delay(5), wait=wait_exponential(multiplier=0.1)) +T = TypeVar("T") def expose(func: Callable) -> Callable: @@ -23,7 +24,7 @@ def wrapper(*args, **kwargs): return func(*args, **kwargs) except Exception as e: msg = f"Error in {func.__name__}: {e.__str__()}" - logger.error(msg) + logger.exception(msg) raise Exception(msg, e) return pyro.expose(wrapper) @@ -51,10 +52,10 @@ def run_service(self): while True: time.sleep(10) - def run_async(self, coro: Coroutine): + def run_async(self, coro: Coroutine[T, Any, T]): return 
asyncio.run_coroutine_threadsafe(coro, self.shared_event_loop) - def run_and_wait(self, coro: Coroutine): + def run_and_wait(self, coro: Coroutine[T, Any, T]) -> T: future = self.run_async(coro) return future.result() diff --git a/rnd/autogpt_server/schema.prisma b/rnd/autogpt_server/schema.prisma index dd049b6c76f6..fa2db7cd4608 100644 --- a/rnd/autogpt_server/schema.prisma +++ b/rnd/autogpt_server/schema.prisma @@ -11,23 +11,24 @@ generator client { // This model describes the Agent Graph/Flow (Multi Agent System). model AgentGraph { - id String @id + id String @id @default(uuid()) name String? description String? - AgentNodes AgentNode[] @relation("AgentGraphNodes") - AgentExecutionSchedule AgentExecutionSchedule[] + AgentNodes AgentNode[] + AgentGraphExecution AgentGraphExecution[] + AgentGraphExecutionSchedule AgentGraphExecutionSchedule[] } // This model describes a single node in the Agent Graph/Flow (Multi Agent System). model AgentNode { - id String @id + id String @id @default(uuid()) agentBlockId String AgentBlock AgentBlock @relation(fields: [agentBlockId], references: [id]) agentGraphId String - AgentGraph AgentGraph @relation("AgentGraphNodes", fields: [agentGraphId], references: [id]) + AgentGraph AgentGraph @relation(fields: [agentGraphId], references: [id]) // List of consumed input, that the parent node should provide. Input AgentNodeLink[] @relation("AgentNodeSink") @@ -46,7 +47,7 @@ model AgentNode { // This model describes the link between two AgentNodes. model AgentNodeLink { - id String @id + id String @id @default(uuid()) // Output of a node is connected to the source of the link. agentNodeSourceId String @@ -61,7 +62,7 @@ model AgentNodeLink { // This model describes a component that will be executed by the AgentNode. model AgentBlock { - id String @id + id String @id @default(uuid()) name String @unique // We allow a block to have multiple types of input & output. @@ -73,45 +74,55 @@ model AgentBlock { ReferencedByAgentNode AgentNode[] } +// This model describes the execution of an AgentGraph. +model AgentGraphExecution { + id String @id @default(uuid()) + + agentGraphId String + AgentGraph AgentGraph @relation(fields: [agentGraphId], references: [id]) + + AgentNodeExecutions AgentNodeExecution[] +} + // This model describes the execution of an AgentNode. model AgentNodeExecution { - id String @id - executionId String + id String @id @default(uuid()) + + agentGraphExecutionId String + AgentGraphExecution AgentGraphExecution @relation(fields: [agentGraphExecutionId], references: [id]) agentNodeId String AgentNode AgentNode @relation(fields: [agentNodeId], references: [id]) - inputData String? - inputFiles FileDefinition[] @relation("InputFiles") - outputName String? - outputData String? - outputFiles FileDefinition[] @relation("OutputFiles") + Input AgentNodeExecutionInputOutput[] @relation("AgentNodeExecutionInput") + Output AgentNodeExecutionInputOutput[] @relation("AgentNodeExecutionOutput") // sqlite does not support enum - // enum Status { QUEUED, RUNNING, SUCCESS, FAILED } + // enum Status { INCOMPLETE, QUEUED, RUNNING, SUCCESS, FAILED } executionStatus String - creationTime DateTime - startTime DateTime? - endTime DateTime? + addedTime DateTime @default(now()) + queuedTime DateTime? + startedTime DateTime? + endedTime DateTime? } -// This model describes a file that can be used as input/output of an AgentNodeExecution. -model FileDefinition { - id String @id - path String - metadata String? // JSON serialized object - mimeType String? - size Int? 
- hash String? - encoding String? +// This model describes the output of an AgentNodeExecution. +model AgentNodeExecutionInputOutput { + id String @id @default(uuid()) + + name String + data String + time DateTime @default(now()) // Prisma requires explicit back-references. - ReferencedByInputFiles AgentNodeExecution[] @relation("InputFiles") - ReferencedByOutputFiles AgentNodeExecution[] @relation("OutputFiles") + referencedByInputExecId String? + ReferencedByInputExec AgentNodeExecution? @relation("AgentNodeExecutionInput", fields: [referencedByInputExecId], references: [id]) + referencedByOutputExecId String? + ReferencedByOutputExec AgentNodeExecution? @relation("AgentNodeExecutionOutput", fields: [referencedByOutputExecId], references: [id]) } // This model describes the recurring execution schedule of an Agent. -model AgentExecutionSchedule { +model AgentGraphExecutionSchedule { id String @id agentGraphId String diff --git a/rnd/autogpt_server/test/executor/test_manager.py b/rnd/autogpt_server/test/executor/test_manager.py index 38af3f0471b0..c521c5d98cbb 100644 --- a/rnd/autogpt_server/test/executor/test_manager.py +++ b/rnd/autogpt_server/test/executor/test_manager.py @@ -45,18 +45,18 @@ async def create_test_graph() -> graph.Graph: return test_graph -async def execute_agent(test_manager: ExecutionManager, test_graph: graph.Graph): +async def execute_graph(test_manager: ExecutionManager, test_graph: graph.Graph): # --- Test adding new executions --- # text = "Hello, World!" input_data = {"input": text} agent_server = AgentServer() - response = await agent_server.execute_agent(test_graph.id, input_data) + response = await agent_server.execute_graph(test_graph.id, input_data) executions = response["executions"] - run_id = response["run_id"] + graph_exec_id = response["id"] assert len(executions) == 2 async def is_execution_completed(): - execs = await agent_server.get_executions(test_graph.id, run_id) + execs = await agent_server.get_executions(test_graph.id, graph_exec_id) return test_manager.queue.empty() and len(execs) == 4 # Wait for the executions to complete @@ -67,34 +67,30 @@ async def is_execution_completed(): # Execution queue should be empty assert await is_execution_completed() - executions = await agent_server.get_executions(test_graph.id, run_id) + executions = await agent_server.get_executions(test_graph.id, graph_exec_id) # Executing ParrotBlock1 exec = executions[0] assert exec.status == execution.ExecutionStatus.COMPLETED - assert exec.run_id == run_id - assert exec.output_name == "output" - assert exec.output_data == "Hello, World!" - assert exec.input_data == input_data + assert exec.graph_exec_id == graph_exec_id + assert exec.output_data == {"output": ["Hello, World!"]} + assert exec.input_data == {"input": text} assert exec.node_id == test_graph.nodes[0].id # Executing ParrotBlock2 exec = executions[1] assert exec.status == execution.ExecutionStatus.COMPLETED - assert exec.run_id == run_id - assert exec.output_name == "output" - assert exec.output_data == "Hello, World!" - assert exec.input_data == input_data + assert exec.graph_exec_id == graph_exec_id + assert exec.output_data == {"output": ["Hello, World!"]} + assert exec.input_data == {"input": text} assert exec.node_id == test_graph.nodes[1].id # Executing TextCombinerBlock exec = executions[2] assert exec.status == execution.ExecutionStatus.COMPLETED - assert exec.run_id == run_id - assert exec.output_name == "combined_text" - assert exec.output_data == "Hello, World!,Hello, World!" 
+    assert exec.graph_exec_id == graph_exec_id
+    assert exec.output_data == {"combined_text": ["Hello, World!,Hello, World!"]}
     assert exec.input_data == {
-        "format": "{text1},{text2}",
         "text1": "Hello, World!",
         "text2": "Hello, World!",
     }
@@ -103,9 +99,8 @@ async def is_execution_completed():
     # Executing PrintingBlock
     exec = executions[3]
     assert exec.status == execution.ExecutionStatus.COMPLETED
-    assert exec.run_id == run_id
-    assert exec.output_name == "status"
-    assert exec.output_data == "printed"
+    assert exec.graph_exec_id == graph_exec_id
+    assert exec.output_data == {"status": ["printed"]}
     assert exec.input_data == {"text": "Hello, World!,Hello, World!"}
     assert exec.node_id == test_graph.nodes[3].id
 
@@ -116,4 +111,4 @@ async def test_agent_execution():
     with ExecutionManager(1) as test_manager:
         await db.connect()
         test_graph = await create_test_graph()
-        await execute_agent(test_manager, test_graph)
+        await execute_graph(test_manager, test_graph)

From 6e1c9d44a4db87d508059eee180788519475637a Mon Sep 17 00:00:00 2001
From: Krzysztof Czerwinski
Date: Wed, 26 Jun 2024 19:41:12 +0200
Subject: [PATCH 06/23] Update Agent Server `README.md`

---
 rnd/autogpt_server/README.md | 54 ++++++++++++++++++++++++++++++++++++
 1 file changed, 54 insertions(+)

diff --git a/rnd/autogpt_server/README.md b/rnd/autogpt_server/README.md
index 1563240a32e7..57342f245c25 100644
--- a/rnd/autogpt_server/README.md
+++ b/rnd/autogpt_server/README.md
@@ -18,3 +18,57 @@
 It will also trigger the agent execution by pushing its execution request to the
 
 A component that will execute the agents.
 This component will be a pool of processes/threads that will consume the ExecutionQueue and execute the agent accordingly.
 The result and progress of its execution will be persisted in the database.
+
+## Setup
+
+This setup is for macOS/Linux.
+To set up the project, follow these steps inside the project directory:
+
+1. Enter the poetry shell
+   ```
+   poetry shell
+   ```
+
+1. Install dependencies
+   ```
+   poetry install
+   ```
+
+1. Generate the Prisma client
+   ```
+   poetry run prisma generate
+   ```
+
+   In case Prisma generates the client for the global Python installation instead of the virtual environment, the current mitigation is to uninstall the global prisma package:
+   ```
+   pip uninstall prisma
+   ```
+
+   Then run the generation again.
+   The path *should* look something like this:
+   `/pypoetry/virtualenvs/autogpt-server-TQIRSwR6-py3.12/bin/prisma`
+
+1. Migrate the database. Be careful: this deletes the current data in the database.
+   ```
+   poetry run prisma migrate dev
+   ```
+
+1. Start the server (it runs in the background)
+   ```
+   poetry run python ./autogpt_server/cli.py start
+   ```
+
+   You may need to change the permissions of the file to make it executable:
+   ```
+   chmod +x autogpt_server/cli.py
+   ```
+
+1. Stop the server
+   ```
+   poetry run python ./autogpt_server/cli.py stop
+   ```
+
+1. 
To run the tests + ``` + poetry run pytest + ``` From dd960f9306e19fb30a3ffbe78734d322b31636e7 Mon Sep 17 00:00:00 2001 From: Aarushi <50577581+aarushik93@users.noreply.github.com> Date: Thu, 27 Jun 2024 10:02:54 +0100 Subject: [PATCH 07/23] Add support for nextjs based app (#7266) * Getting started with nextjs * fix linting * remove gitignore for package.json --- .gitignore | 2 +- rnd/autogpt_builder/.eslintrc.json | 3 + rnd/autogpt_builder/.gitignore | 36 +++++++++ rnd/autogpt_builder/README.md | 25 ++++++ rnd/autogpt_builder/next.config.mjs | 4 + rnd/autogpt_builder/package.json | 26 +++++++ rnd/autogpt_builder/postcss.config.mjs | 8 ++ rnd/autogpt_builder/public/autogpt.svg | 72 ++++++++++++++++++ rnd/autogpt_builder/src/app/favicon.ico | Bin 0 -> 15406 bytes rnd/autogpt_builder/src/app/globals.css | 33 ++++++++ rnd/autogpt_builder/src/app/layout.tsx | 22 ++++++ rnd/autogpt_builder/src/app/page.tsx | 97 ++++++++++++++++++++++++ rnd/autogpt_builder/tailwind.config.ts | 20 +++++ rnd/autogpt_builder/tsconfig.json | 26 +++++++ 14 files changed, 373 insertions(+), 1 deletion(-) create mode 100644 rnd/autogpt_builder/.eslintrc.json create mode 100644 rnd/autogpt_builder/.gitignore create mode 100644 rnd/autogpt_builder/README.md create mode 100644 rnd/autogpt_builder/next.config.mjs create mode 100644 rnd/autogpt_builder/package.json create mode 100644 rnd/autogpt_builder/postcss.config.mjs create mode 100644 rnd/autogpt_builder/public/autogpt.svg create mode 100644 rnd/autogpt_builder/src/app/favicon.ico create mode 100644 rnd/autogpt_builder/src/app/globals.css create mode 100644 rnd/autogpt_builder/src/app/layout.tsx create mode 100644 rnd/autogpt_builder/src/app/page.tsx create mode 100644 rnd/autogpt_builder/tailwind.config.ts create mode 100644 rnd/autogpt_builder/tsconfig.json diff --git a/.gitignore b/.gitignore index 3b4050b42eae..d23d09da8ed0 100644 --- a/.gitignore +++ b/.gitignore @@ -162,7 +162,7 @@ agbenchmark/reports/ # Nodejs package-lock.json -package.json + # Allow for locally private items # private diff --git a/rnd/autogpt_builder/.eslintrc.json b/rnd/autogpt_builder/.eslintrc.json new file mode 100644 index 000000000000..bffb357a7122 --- /dev/null +++ b/rnd/autogpt_builder/.eslintrc.json @@ -0,0 +1,3 @@ +{ + "extends": "next/core-web-vitals" +} diff --git a/rnd/autogpt_builder/.gitignore b/rnd/autogpt_builder/.gitignore new file mode 100644 index 000000000000..fd3dbb571a12 --- /dev/null +++ b/rnd/autogpt_builder/.gitignore @@ -0,0 +1,36 @@ +# See https://help.github.com/articles/ignoring-files/ for more about ignoring files. + +# dependencies +/node_modules +/.pnp +.pnp.js +.yarn/install-state.gz + +# testing +/coverage + +# next.js +/.next/ +/out/ + +# production +/build + +# misc +.DS_Store +*.pem + +# debug +npm-debug.log* +yarn-debug.log* +yarn-error.log* + +# local env files +.env*.local + +# vercel +.vercel + +# typescript +*.tsbuildinfo +next-env.d.ts diff --git a/rnd/autogpt_builder/README.md b/rnd/autogpt_builder/README.md new file mode 100644 index 000000000000..5aa16c678118 --- /dev/null +++ b/rnd/autogpt_builder/README.md @@ -0,0 +1,25 @@ +This is the frontend for AutoGPT's next generation + +## Getting Started + +First, run the development server: + +```bash +npm run dev +# or +yarn dev +# or +pnpm dev +# or +bun dev +``` + +Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. + +You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file. 
+
+This project uses [`next/font`](https://nextjs.org/docs/basic-features/font-optimization) to automatically optimize and load Inter, a custom Google Font.
+
+## Deploy
+
+TODO
\ No newline at end of file
diff --git a/rnd/autogpt_builder/next.config.mjs b/rnd/autogpt_builder/next.config.mjs
new file mode 100644
index 000000000000..4678774e6d60
--- /dev/null
+++ b/rnd/autogpt_builder/next.config.mjs
@@ -0,0 +1,4 @@
+/** @type {import('next').NextConfig} */
+const nextConfig = {};
+
+export default nextConfig;
diff --git a/rnd/autogpt_builder/package.json b/rnd/autogpt_builder/package.json
new file mode 100644
index 000000000000..67b793bbadc6
--- /dev/null
+++ b/rnd/autogpt_builder/package.json
@@ -0,0 +1,26 @@
+{
+  "name": "autogpt_builder",
+  "version": "0.1.0",
+  "private": true,
+  "scripts": {
+    "dev": "next dev",
+    "build": "next build",
+    "start": "next start",
+    "lint": "next lint"
+  },
+  "dependencies": {
+    "react": "^18",
+    "react-dom": "^18",
+    "next": "14.2.4"
+  },
+  "devDependencies": {
+    "typescript": "^5",
+    "@types/node": "^20",
+    "@types/react": "^18",
+    "@types/react-dom": "^18",
+    "postcss": "^8",
+    "tailwindcss": "^3.4.1",
+    "eslint": "^8",
+    "eslint-config-next": "14.2.4"
+  }
+}
diff --git a/rnd/autogpt_builder/postcss.config.mjs b/rnd/autogpt_builder/postcss.config.mjs
new file mode 100644
index 000000000000..1a69fd2a450a
--- /dev/null
+++ b/rnd/autogpt_builder/postcss.config.mjs
@@ -0,0 +1,8 @@
+/** @type {import('postcss-load-config').Config} */
+const config = {
+  plugins: {
+    tailwindcss: {},
+  },
+};
+
+export default config;
diff --git a/rnd/autogpt_builder/public/autogpt.svg b/rnd/autogpt_builder/public/autogpt.svg
new file mode 100644
index 000000000000..3f322e4d3a0a
--- /dev/null
+++ b/rnd/autogpt_builder/public/autogpt.svg
@@ -0,0 +1,72 @@
+[72 lines of SVG markup for the AutoGPT logo; tag content lost in extraction]
diff --git a/rnd/autogpt_builder/src/app/favicon.ico b/rnd/autogpt_builder/src/app/favicon.ico
new file mode 100644
index 0000000000000000000000000000000000000000..74db319671075557116db09405f64c789cefebb9
GIT binary patch
literal 15406
[base85-encoded binary favicon data omitted]
diff --git a/rnd/autogpt_builder/src/app/globals.css b/rnd/autogpt_builder/src/app/globals.css
new file mode 100644
[33 lines of global Tailwind CSS; lost in extraction]
diff --git a/rnd/autogpt_builder/src/app/layout.tsx b/rnd/autogpt_builder/src/app/layout.tsx
new file mode 100644
[22 lines, garbled in extraction; the surviving fragment shows RootLayout rendering the document shell around {children}]
diff --git a/rnd/autogpt_builder/src/app/page.tsx b/rnd/autogpt_builder/src/app/page.tsx
new file mode 100644
index 000000000000..916ce244e8f8
--- /dev/null
+++ b/rnd/autogpt_builder/src/app/page.tsx
@@ -0,0 +1,97 @@
+import Image from "next/image";
+
+export default function Home() {
+  return (
+    {/* [JSX lost in extraction: the default Next.js starter layout — a "Get started by adding a node" prompt, the AutoGPT logo image (alt "AutoGPT Logo"), and footer links] */}
+ ); +} diff --git a/rnd/autogpt_builder/tailwind.config.ts b/rnd/autogpt_builder/tailwind.config.ts new file mode 100644 index 000000000000..e9a0944e7b31 --- /dev/null +++ b/rnd/autogpt_builder/tailwind.config.ts @@ -0,0 +1,20 @@ +import type { Config } from "tailwindcss"; + +const config: Config = { + content: [ + "./src/pages/**/*.{js,ts,jsx,tsx,mdx}", + "./src/components/**/*.{js,ts,jsx,tsx,mdx}", + "./src/app/**/*.{js,ts,jsx,tsx,mdx}", + ], + theme: { + extend: { + backgroundImage: { + "gradient-radial": "radial-gradient(var(--tw-gradient-stops))", + "gradient-conic": + "conic-gradient(from 180deg at 50% 50%, var(--tw-gradient-stops))", + }, + }, + }, + plugins: [], +}; +export default config; diff --git a/rnd/autogpt_builder/tsconfig.json b/rnd/autogpt_builder/tsconfig.json new file mode 100644 index 000000000000..7b2858930495 --- /dev/null +++ b/rnd/autogpt_builder/tsconfig.json @@ -0,0 +1,26 @@ +{ + "compilerOptions": { + "lib": ["dom", "dom.iterable", "esnext"], + "allowJs": true, + "skipLibCheck": true, + "strict": true, + "noEmit": true, + "esModuleInterop": true, + "module": "esnext", + "moduleResolution": "bundler", + "resolveJsonModule": true, + "isolatedModules": true, + "jsx": "preserve", + "incremental": true, + "plugins": [ + { + "name": "next" + } + ], + "paths": { + "@/*": ["./src/*"] + } + }, + "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], + "exclude": ["node_modules"] +} From cdc658695f53688ff4a304e73294d148e846e409 Mon Sep 17 00:00:00 2001 From: Aarushi <50577581+aarushik93@users.noreply.github.com> Date: Thu, 27 Jun 2024 10:14:25 +0100 Subject: [PATCH 08/23] Add reactflow component in AutoGPT builder (#7270) * Getting started with nextjs * fix linting * remove gitignore for package.json * pulling in reactflow components * updating css * use environment variables * clean up css / ui a lil * Fixed nodes/run button animation so they are always visible --------- Co-authored-by: Bentlybro --- rnd/autogpt_builder/.env.example | 1 + rnd/autogpt_builder/package.json | 11 +- rnd/autogpt_builder/src/app/page.tsx | 123 +--- .../src/components/CustomNode.tsx | 115 ++++ rnd/autogpt_builder/src/components/Flow.tsx | 590 ++++++++++++++++++ rnd/autogpt_builder/src/components/flow.css | 150 +++++ 6 files changed, 896 insertions(+), 94 deletions(-) create mode 100644 rnd/autogpt_builder/.env.example create mode 100644 rnd/autogpt_builder/src/components/CustomNode.tsx create mode 100644 rnd/autogpt_builder/src/components/Flow.tsx create mode 100644 rnd/autogpt_builder/src/components/flow.css diff --git a/rnd/autogpt_builder/.env.example b/rnd/autogpt_builder/.env.example new file mode 100644 index 000000000000..a70a8847a25b --- /dev/null +++ b/rnd/autogpt_builder/.env.example @@ -0,0 +1 @@ +AGPT_SERVER_URL=http://localhost:8000 \ No newline at end of file diff --git a/rnd/autogpt_builder/package.json b/rnd/autogpt_builder/package.json index 67b793bbadc6..941e6ca669c7 100644 --- a/rnd/autogpt_builder/package.json +++ b/rnd/autogpt_builder/package.json @@ -9,18 +9,21 @@ "lint": "next lint" }, "dependencies": { + "next": "14.2.4", "react": "^18", "react-dom": "^18", - "next": "14.2.4" + "react-modal": "^3.16.1", + "reactflow": "^11.11.4" }, "devDependencies": { - "typescript": "^5", "@types/node": "^20", "@types/react": "^18", "@types/react-dom": "^18", + "@types/react-modal": "^3.16.3", + "eslint": "^8", + "eslint-config-next": "14.2.4", "postcss": "^8", "tailwindcss": "^3.4.1", - "eslint": "^8", - "eslint-config-next": "14.2.4" + "typescript": "^5" } } 
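Before the component diffs below, note how the builder locates the agent server: `.env.example` above defines `AGPT_SERVER_URL`, but the `Flow` component added later in this patch hardcodes `http://localhost:8000`. A minimal sketch of wiring the two together, assuming the variable were exposed to the browser as `NEXT_PUBLIC_AGPT_SERVER_URL` (Next.js only ships `NEXT_PUBLIC_`-prefixed variables to client components); the helper name and the rename are hypothetical, not part of this patch:

```typescript
// Hypothetical helper: resolve the agent server URL from the environment,
// falling back to the same default the Flow component hardcodes.
// NEXT_PUBLIC_AGPT_SERVER_URL is an assumed client-visible rename of the
// AGPT_SERVER_URL defined in .env.example.
export function getApiUrl(): string {
  return process.env.NEXT_PUBLIC_AGPT_SERVER_URL ?? 'http://localhost:8000';
}
```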
diff --git a/rnd/autogpt_builder/src/app/page.tsx b/rnd/autogpt_builder/src/app/page.tsx index 916ce244e8f8..63cefb866fc9 100644 --- a/rnd/autogpt_builder/src/app/page.tsx +++ b/rnd/autogpt_builder/src/app/page.tsx @@ -1,97 +1,40 @@ import Image from "next/image"; +import Flow from '../components/Flow'; export default function Home() { return ( -
-      {/* [removed JSX lost in extraction: the starter "Get started by adding a node" layout with the AutoGPT logo] */}
+      {/* [new JSX lost in extraction: a "Get started by adding a node" header and a full-size container rendering the <Flow /> canvas] */}
   );
 }
diff --git a/rnd/autogpt_builder/src/components/CustomNode.tsx b/rnd/autogpt_builder/src/components/CustomNode.tsx
new file mode 100644
index 000000000000..522cd7863549
--- /dev/null
+++ b/rnd/autogpt_builder/src/components/CustomNode.tsx
@@ -0,0 +1,115 @@
+import React, { useState, useEffect, FC, memo } from 'react';
+import { Handle, Position, NodeProps } from 'reactflow';
+import 'reactflow/dist/style.css';
+
+type Schema = {
+  properties: { [key: string]: any };
+};
+
+const CustomNode: FC<NodeProps> = ({ data }) => {
+  const [isPropertiesOpen, setIsPropertiesOpen] = useState(data.isPropertiesOpen || false);
+
+  // Automatically open properties when output_data or status is updated
+  useEffect(() => {
+    if (data.output_data || data.status) {
+      setIsPropertiesOpen(true);
+    }
+  }, [data.output_data, data.status]);
+
+  const toggleProperties = () => {
+    setIsPropertiesOpen(!isPropertiesOpen);
+  };
+
+  const generateHandles = (schema: Schema, type: 'source' | 'target') => {
+    if (!schema?.properties) return null;
+    const keys = Object.keys(schema.properties);
+    return keys.map((key) => (
+      {/* [per-key JSX lost in extraction: for 'target', a left-positioned <Handle> before the {key} label; for 'source', the {key} label before a right-positioned <Handle>] */}
+ )); + }; + + const handleInputChange = (key: string, value: any) => { + const newValues = { ...data.hardcodedValues, [key]: value }; + data.setHardcodedValues(newValues); + }; + + const isHandleConnected = (key: string) => { + return data.connections.some((conn: string) => { + const [, target] = conn.split(' -> '); + return target.includes(key) && target.includes(data.title); + }); + }; + + const hasDisconnectedHandle = (key: string) => { + return !isHandleConnected(key); + }; + + return ( +
+      {/* [JSX lost in extraction: the node card — a title bar showing {data?.title.replace(/\d+/g, '')} with a properties-toggle button; target handles from inputSchema plus inline <input> fields (onChange -> handleInputChange, dark inline styling) for inputs without connections; source handles from outputSchema; and, when isPropertiesOpen, a "Node Output" panel printing Status and Output Data (JSON-stringified when they are objects)] */}
+  );
+};
+
+export default memo(CustomNode);
diff --git a/rnd/autogpt_builder/src/components/Flow.tsx b/rnd/autogpt_builder/src/components/Flow.tsx
new file mode 100644
index 000000000000..8a0191b3b11e
--- /dev/null
+++ b/rnd/autogpt_builder/src/components/Flow.tsx
@@ -0,0 +1,590 @@
+"use client";
+
+import React, { useState, useCallback, useEffect } from 'react';
+import ReactFlow, {
+  addEdge,
+  applyNodeChanges,
+  applyEdgeChanges,
+  Node,
+  Edge,
+  OnNodesChange,
+  OnEdgesChange,
+  OnConnect,
+  NodeTypes,
+  EdgeRemoveChange,
+} from 'reactflow';
+import 'reactflow/dist/style.css';
+import Modal from 'react-modal';
+import CustomNode from './CustomNode';
+import './flow.css';
+
+const initialNodes: Node[] = [];
+const initialEdges: Edge[] = [];
+const nodeTypes: NodeTypes = {
+  custom: CustomNode,
+};
+
+interface AvailableNode {
+  id: string;
+  name: string;
+  description: string;
+  inputSchema?: { properties: { [key: string]: any }; required?: string[] };
+  outputSchema?: { properties: { [key: string]: any } };
+}
+
+interface ExecData {
+  node_id: string;
+  status: string;
+  output_data: any;
+}
+
+const Flow: React.FC = () => {
+  const [nodes, setNodes] = useState(initialNodes);
+  const [edges, setEdges] = useState(initialEdges);
+  const [nodeId, setNodeId] = useState(1);
+  const [modalIsOpen, setModalIsOpen] = useState(false);
+  const [selectedNode, setSelectedNode] = useState<Node | null>(null);
+  const [title, setTitle] = useState('');
+  const [description, setDescription] = useState('');
+  const [variableName, setVariableName] = useState('');
+  const [variableValue, setVariableValue] = useState('');
+  const [printVariable, setPrintVariable] = useState('');
+  const [isSidebarOpen, setIsSidebarOpen] = useState(false);
+  const [searchQuery, setSearchQuery] = useState('');
+  const [availableNodes, setAvailableNodes] = useState<AvailableNode[]>([]);
+  const [loadingStatus, setLoadingStatus] = useState<'loading' | 'failed' | 'loaded'>('loading');
+  const [agentId, setAgentId] = useState<string | null>(null);
+
+  const apiUrl = 'http://localhost:8000'
+
+  useEffect(() => {
+    fetch(`${apiUrl}/blocks`)
+      .then(response => {
+        if (!response.ok) {
+          throw new Error(`HTTP error! Status: ${response.status}`);
+        }
+        return response.json();
+      })
+      .then(data => {
+        setAvailableNodes(data.map((node: AvailableNode) => ({
+          ...node,
+          description: typeof node.description === 'object' ?
JSON.stringify(node.description) : node.description, + }))); + setLoadingStatus('loaded'); + }) + .catch(error => { + console.error('Error fetching nodes:', error); + setLoadingStatus('failed'); + }); + }, []); + + const onNodesChange: OnNodesChange = useCallback( + (changes) => setNodes((nds) => applyNodeChanges(changes, nds).map(node => ({ + ...node, + data: { + ...node.data, + metadata: { + ...node.data.metadata, + position: node.position + } + } + }))), + [] + ); + + const onEdgesChange: OnEdgesChange = useCallback( + (changes) => { + const removedEdges = changes.filter((change): change is EdgeRemoveChange => change.type === 'remove'); + setEdges((eds) => applyEdgeChanges(changes, eds)); + + if (removedEdges.length > 0) { + setNodes((nds) => + nds.map((node) => { + const updatedConnections = node.data.connections.filter( + (conn: string) => + !removedEdges.some((edge) => edge.id && conn.includes(edge.id)) + ); + return { ...node, data: { ...node.data, connections: updatedConnections } }; + }) + ); + } + }, + [] + ); + + const onConnect: OnConnect = useCallback( + (connection) => { + setEdges((eds) => addEdge(connection, eds)); + setNodes((nds) => + nds.map((node) => { + if (node.id === connection.source) { + const connections = node.data.connections || []; + connections.push(`${node.data.title} ${connection.sourceHandle} -> ${connection.targetHandle}`); + return { ...node, data: { ...node.data, connections } }; + } + if (node.id === connection.target) { + const connections = node.data.connections || []; + connections.push(`${connection.sourceHandle} -> ${node.data.title} ${connection.targetHandle}`); + return { ...node, data: { ...node.data, connections } }; + } + return node; + }) + ); + }, + [setEdges, setNodes] + ); + + const addNode = (type: string, label: string, description: string) => { + const nodeSchema = availableNodes.find(node => node.name === label); + const position = { x: Math.random() * 400, y: Math.random() * 400 }; + + const newNode: Node = { + id: nodeId.toString(), + type: 'custom', + data: { + label: label, + title: `${type} ${nodeId}`, + description: `${description}`, + inputSchema: nodeSchema?.inputSchema, + outputSchema: nodeSchema?.outputSchema, + connections: [], + variableName: '', + variableValue: '', + printVariable: '', + setVariableName, + setVariableValue, + setPrintVariable, + hardcodedValues: {}, + setHardcodedValues: (values: { [key: string]: any }) => { + setNodes((nds) => nds.map((node) => + node.id === nodeId.toString() + ? { ...node, data: { ...node.data, hardcodedValues: values } } + : node + )); + }, + block_id: nodeSchema?.id || '', + metadata: { + position // Store position in metadata + } + }, + position, + }; + setNodes((nds) => [...nds, newNode]); + setNodeId((id) => id + 1); + }; + + const closeModal = () => { + setModalIsOpen(false); + setSelectedNode(null); + }; + + const saveNodeData = () => { + if (selectedNode) { + setNodes((nds) => + nds.map((node) => + node.id === selectedNode.id + ? { + ...node, + data: { + ...node.data, + title, + description, + label: title, + variableName, + variableValue: typeof variableValue === 'object' ? JSON.stringify(variableValue) : variableValue, + printVariable: typeof printVariable === 'object' ? 
JSON.stringify(printVariable) : printVariable, + }, + } + : node + ) + ); + closeModal(); + } + }; + + const toggleSidebar = () => { + setIsSidebarOpen(!isSidebarOpen); + }; + + const filteredNodes = availableNodes.filter(node => node.name.toLowerCase().includes(searchQuery.toLowerCase())); + + const prepareNodeInputData = (node: Node, allNodes: Node[], allEdges: Edge[]) => { + const nodeSchema = availableNodes.find(n => n.id === node.data.block_id); + if (!nodeSchema || !nodeSchema.inputSchema) return {}; + + let inputData: { [key: string]: any } = {}; + const inputProperties = nodeSchema.inputSchema.properties; + const requiredProperties = nodeSchema.inputSchema.required || []; + + // Initialize inputData with default values for all required properties + requiredProperties.forEach(prop => { + inputData[prop] = node.data.hardcodedValues[prop] || ''; + }); + + Object.keys(inputProperties).forEach(prop => { + const inputEdge = allEdges.find(edge => edge.target === node.id && edge.targetHandle === prop); + if (inputEdge) { + const sourceNode = allNodes.find(n => n.id === inputEdge.source); + inputData[prop] = sourceNode?.data.output_data || sourceNode?.data.hardcodedValues[prop] || ''; + } else if (node.data.hardcodedValues && node.data.hardcodedValues[prop]) { + inputData[prop] = node.data.hardcodedValues[prop]; + } + }); + + return inputData; + }; + + const updateNodeData = (execData: ExecData) => { + setNodes((nds) => + nds.map((node) => { + if (node.id === execData.node_id) { + return { + ...node, + data: { + ...node.data, + status: execData.status, + output_data: execData.output_data, + isPropertiesOpen: true, // Open the properties + }, + }; + } + return node; + }) + ); + }; + + const runAgent = async () => { + try { + const formattedNodes = nodes.map(node => ({ + id: node.id, + block_id: node.data.block_id, + input_default: prepareNodeInputData(node, nodes, edges), + input_nodes: edges.filter(edge => edge.target === node.id).reduce((acc, edge) => { + if (edge.targetHandle) { + acc[edge.targetHandle] = edge.source; + } + return acc; + }, {} as { [key: string]: string }), + output_nodes: edges.filter(edge => edge.source === node.id).reduce((acc, edge) => { + if (edge.sourceHandle) { + acc[edge.sourceHandle] = edge.target; + } + return acc; + }, {} as { [key: string]: string }), + metadata: node.data.metadata, + connections: node.data.connections // Ensure connections are preserved + })); + + const payload = { + id: '', + name: 'Agent Name', + description: 'Agent Description', + nodes: formattedNodes, + }; + + const createResponse = await fetch(`${apiUrl}/agents`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(payload), + }); + + if (!createResponse.ok) { + throw new Error(`HTTP error! 
Status: ${createResponse.status}`); + } + + const createData = await createResponse.json(); + const agentId = createData.id; + setAgentId(agentId); + + const responseNodes = createData.nodes.map((node: any) => { + const block = availableNodes.find(n => n.id === node.block_id); + const connections = edges.filter(edge => edge.source === node.id || edge.target === node.id).map(edge => ({ + id: edge.id, + source: edge.source, + sourceHandle: edge.sourceHandle, + target: edge.target, + targetHandle: edge.targetHandle + })); + return { + id: node.id, + type: 'custom', + position: node.metadata.position, + data: { + label: block?.name || 'Unknown', + title: `${block?.name || 'Unknown'}`, + description: `${block?.description || ''}`, + inputSchema: block?.inputSchema, + outputSchema: block?.outputSchema, + connections: connections.map(c => `${c.source}-${c.sourceHandle} -> ${c.target}-${c.targetHandle}`), + variableName: '', + variableValue: '', + printVariable: '', + setVariableName, + setVariableValue, + setPrintVariable, + hardcodedValues: node.input_default, + setHardcodedValues: (values: { [key: string]: any }) => { + setNodes((nds) => nds.map((n) => + n.id === node.id + ? { ...n, data: { ...n.data, hardcodedValues: values } } + : n + )); + }, + block_id: node.block_id, + metadata: node.metadata + }, + }; + }); + + const newEdges = createData.nodes.flatMap((node: any) => { + return Object.entries(node.output_nodes).map(([sourceHandle, targetNodeId]) => ({ + id: `${node.id}-${sourceHandle}-${targetNodeId}`, + source: node.id, + sourceHandle: sourceHandle, + target: targetNodeId, + targetHandle: Object.keys(node.input_nodes).find(key => node.input_nodes[key] === targetNodeId) || '', + })); + }); + + setNodes(responseNodes); + setEdges(newEdges); + + const initialNodeInput = nodes.reduce((acc, node) => { + acc[node.id] = prepareNodeInputData(node, nodes, edges); + return acc; + }, {} as { [key: string]: any }); + + const nodeInputForExecution = Object.keys(initialNodeInput).reduce((acc, key) => { + const blockId = nodes.find(node => node.id === key)?.data.block_id; + const nodeSchema = availableNodes.find(n => n.id === blockId); + if (nodeSchema && nodeSchema.inputSchema) { + Object.keys(nodeSchema.inputSchema.properties).forEach(prop => { + acc[prop] = initialNodeInput[key][prop]; + }); + } + return acc; + }, {} as { [key: string]: any }); + + const executeResponse = await fetch(`${apiUrl}/agents/${agentId}/execute`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + }, + body: JSON.stringify(nodeInputForExecution), + }); + + if (!executeResponse.ok) { + throw new Error(`HTTP error! Status: ${executeResponse.status}`); + } + + const executeData = await executeResponse.json(); + const runId = executeData.run_id; + + const startPolling = () => { + const endTime = Date.now() + 60000; + + const poll = async () => { + if (Date.now() >= endTime) { + console.log('Polling timeout reached.'); + return; + } + + try { + const response = await fetch(`${apiUrl}/agents/${agentId}/executions/${runId}`); + if (!response.ok) { + throw new Error(`HTTP error! 
Status: ${response.status}`); + } + + const data = await response.json(); + data.forEach(updateNodeData); + + const allCompleted = data.every((exec: any) => exec.status === 'COMPLETED'); + if (allCompleted) { + console.log('All nodes are completed.'); + return; + } + + setTimeout(poll, 100); + } catch (error) { + console.error('Error during polling:', error); + setTimeout(poll, 100); + } + }; + + poll(); + }; + + startPolling(); + } catch (error) { + console.error('Error running agent:', error); + } + }; + + return ( +
+      {/* [JSX lost in extraction: the editor shell — a toolbar whose buttons open the block sidebar and trigger runAgent, an "Agent ID: {agentId}" badge once a graph has been created, the <ReactFlow> canvas wired to nodes/edges, onNodesChange/onEdgesChange/onConnect and nodeTypes, the searchable sidebar listing filteredNodes, and a react-modal "Edit Node" dialog shown while selectedNode is set, whose form calls e.preventDefault() and saveNodeData() on submit with fields bound to the title/description state] */}
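Taken together, `runAgent` and `startPolling` above implement a simple execute-and-poll protocol against the agent server. A condensed sketch of that flow: the endpoint paths, payload shapes, and the `COMPLETED` terminal status are taken from the component code, while the helper name, structure, and error handling are illustrative. Note that the component as committed still targets the `/agents/...` paths even though the server diff earlier in this series renamed the routes to `/graphs/...`; the sketch keeps the component's paths.

```typescript
type NodeExecResult = { node_id: string; status: string; output_data: any };

async function executeAndPoll(
  apiUrl: string,
  agentId: string,
  nodeInput: Record<string, any>,
  onUpdate: (exec: NodeExecResult) => void,
): Promise<void> {
  // Kick off a run of an already-created agent graph.
  const res = await fetch(`${apiUrl}/agents/${agentId}/execute`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(nodeInput),
  });
  if (!res.ok) throw new Error(`HTTP error! Status: ${res.status}`);
  const { run_id } = await res.json();

  // Poll the run's execution list until every node reports COMPLETED,
  // mirroring the 60s budget and ~100ms interval used by startPolling.
  const deadline = Date.now() + 60_000;
  while (Date.now() < deadline) {
    const poll = await fetch(`${apiUrl}/agents/${agentId}/executions/${run_id}`);
    if (!poll.ok) throw new Error(`HTTP error! Status: ${poll.status}`);
    const execs: NodeExecResult[] = await poll.json();
    execs.forEach(onUpdate);
    if (execs.every((e) => e.status === 'COMPLETED')) return;
    await new Promise((resolve) => setTimeout(resolve, 100));
  }
}
```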