Commit

Merge branch 'dev' into reinier/secrt-994-update-caching-for-frontend-in-ci

Pwuts committed Nov 18, 2024
2 parents 721f725 + 29cff1b commit 658816d
Showing 18 changed files with 141 additions and 43 deletions.
9 changes: 7 additions & 2 deletions autogpt_platform/autogpt_libs/autogpt_libs/auth/depends.py
@@ -1,7 +1,8 @@
import fastapi

from .middleware import auth_middleware
from .models import User
from .models import User, DEFAULT_USER_ID, DEFAULT_EMAIL
from .config import Settings


def requires_user(payload: dict = fastapi.Depends(auth_middleware)) -> User:
@@ -16,8 +17,12 @@ def requires_admin_user(

def verify_user(payload: dict | None, admin_only: bool) -> User:
if not payload:
if Settings.ENABLE_AUTH:
raise fastapi.HTTPException(
status_code=401, detail="Authorization header is missing"
)
# This handles the case when authentication is disabled
payload = {"sub": "3e53486c-cf57-477e-ba2a-cb02dc828e1a", "role": "admin"}
payload = {"sub": DEFAULT_USER_ID, "role": "admin"}

user_id = payload.get("sub")

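In effect, when authentication is disabled every request now resolves to the shared default user instead of failing. A minimal standalone sketch of that fallback path (the auth flag is passed in explicitly here rather than read from Settings):

import fastapi

DEFAULT_USER_ID = "3e53486c-cf57-477e-ba2a-cb02dc828e1a"  # mirrors auth/models.py below


def resolve_user_id(payload: dict | None, auth_enabled: bool) -> str:
    # Sketch of the verify_user fallback shown above.
    if not payload:
        if auth_enabled:
            # Auth is on: a missing Authorization header is a hard 401.
            raise fastapi.HTTPException(
                status_code=401, detail="Authorization header is missing"
            )
        # Auth is off: the request is attributed to the default admin user.
        payload = {"sub": DEFAULT_USER_ID, "role": "admin"}
    return payload["sub"]
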
3 changes: 3 additions & 0 deletions autogpt_platform/autogpt_libs/autogpt_libs/auth/models.py
@@ -1,5 +1,8 @@
from dataclasses import dataclass

DEFAULT_USER_ID = "3e53486c-cf57-477e-ba2a-cb02dc828e1a"
DEFAULT_EMAIL = "[email protected]"


# Using dataclass here to avoid adding dependency on pydantic
@dataclass(frozen=True)
@@ -79,13 +79,20 @@
title="Use Credits for Jina",
expires_at=None,
)
unreal_credentials = APIKeyCredentials(#
unreal_credentials = APIKeyCredentials(
id="66f20754-1b81-48e4-91d0-f4f0dd82145f",
provider="unreal",
api_key=SecretStr(settings.secrets.unreal_speech_api_key),
title="Use Credits for Unreal",
expires_at=None,
)
open_router_credentials = APIKeyCredentials(
id="b5a0e27d-0c98-4df3-a4b9-10193e1f3c40",
provider="open_router",
api_key=SecretStr(settings.secrets.open_router_api_key),
title="Use Credits for Open Router",
expires_at=None,
)


DEFAULT_CREDENTIALS = [
@@ -98,6 +105,7 @@
did_credentials,
jina_credentials,
unreal_credentials,
open_router_credentials,
]


@@ -145,6 +153,8 @@ def get_all_creds(self, user_id: str) -> list[Credentials]:
all_credentials.append(jina_credentials)
if settings.secrets.unreal_speech_api_key:
all_credentials.append(unreal_credentials)
if settings.secrets.open_router_api_key:
all_credentials.append(open_router_credentials)
return all_credentials

def get_creds_by_id(self, user_id: str, credentials_id: str) -> Credentials | None:
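A hypothetical pytest-style sketch of the gating behavior added here (the store and settings fixtures are assumptions, not part of this diff): the shared Open Router credential should only be offered when the platform has a key configured.

def test_open_router_default_credential_is_gated(store, settings):
    # With no platform key configured, the shared credential is not advertised.
    settings.secrets.open_router_api_key = ""
    assert all(c.provider != "open_router" for c in store.get_all_creds("user-1"))

    # With a key configured, the credential with the fixed UUID becomes available.
    settings.secrets.open_router_api_key = "sk-or-test"
    assert any(
        c.id == "b5a0e27d-0c98-4df3-a4b9-10193e1f3c40"
        for c in store.get_all_creds("user-1")
    )
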
1 change: 1 addition & 0 deletions autogpt_platform/backend/.env.example
@@ -57,6 +57,7 @@ GOOGLE_CLIENT_SECRET=
OPENAI_API_KEY=
ANTHROPIC_API_KEY=
GROQ_API_KEY=
OPEN_ROUTER_API_KEY=

# Reddit
REDDIT_CLIENT_ID=
55 changes: 53 additions & 2 deletions autogpt_platform/backend/backend/blocks/llm.py
@@ -30,7 +30,7 @@
# "ollama": BlockSecret(value=""),
# }

LLMProviderName = Literal["anthropic", "groq", "openai", "ollama"]
LLMProviderName = Literal["anthropic", "groq", "openai", "ollama", "open_router"]
AICredentials = CredentialsMetaInput[LLMProviderName, Literal["api_key"]]

TEST_CREDENTIALS = APIKeyCredentials(
@@ -51,7 +51,7 @@
def AICredentialsField() -> AICredentials:
return CredentialsField(
description="API key for the LLM provider.",
provider=["anthropic", "groq", "openai", "ollama"],
provider=["anthropic", "groq", "openai", "ollama", "open_router"],
supported_credential_types={"api_key"},
discriminator="model",
discriminator_mapping={
@@ -108,6 +118,18 @@ class LlmModel(str, Enum, metaclass=LlmModelMeta):
# Ollama models
OLLAMA_LLAMA3_8B = "llama3"
OLLAMA_LLAMA3_405B = "llama3.1:405b"
# OpenRouter models
GEMINI_FLASH_1_5_8B = "google/gemini-flash-1.5"
GEMINI_FLASH_1_5_EXP = "google/gemini-flash-1.5-exp"
GROK_BETA = "x-ai/grok-beta"
MISTRAL_NEMO = "mistralai/mistral-nemo"
COHERE_COMMAND_R_08_2024 = "cohere/command-r-08-2024"
COHERE_COMMAND_R_PLUS_08_2024 = "cohere/command-r-plus-08-2024"
EVA_QWEN_2_5_32B = "eva-unit-01/eva-qwen-2.5-32b"
DEEPSEEK_CHAT = "deepseek/deepseek-chat"
PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE = (
"perplexity/llama-3.1-sonar-large-128k-online"
)

@property
def metadata(self) -> ModelMetadata:
@@ -142,6 +154,17 @@ def context_window(self) -> int:
LlmModel.LLAMA3_1_8B: ModelMetadata("groq", 131072),
LlmModel.OLLAMA_LLAMA3_8B: ModelMetadata("ollama", 8192),
LlmModel.OLLAMA_LLAMA3_405B: ModelMetadata("ollama", 8192),
LlmModel.GEMINI_FLASH_1_5_8B: ModelMetadata("open_router", 8192),
LlmModel.GEMINI_FLASH_1_5_EXP: ModelMetadata("open_router", 8192),
LlmModel.GROK_BETA: ModelMetadata("open_router", 8192),
LlmModel.MISTRAL_NEMO: ModelMetadata("open_router", 4000),
LlmModel.COHERE_COMMAND_R_08_2024: ModelMetadata("open_router", 4000),
LlmModel.COHERE_COMMAND_R_PLUS_08_2024: ModelMetadata("open_router", 4000),
LlmModel.EVA_QWEN_2_5_32B: ModelMetadata("open_router", 4000),
LlmModel.DEEPSEEK_CHAT: ModelMetadata("open_router", 8192),
LlmModel.PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE: ModelMetadata(
"open_router", 8192
),
}

for model in LlmModel:
@@ -354,6 +377,34 @@ def llm_call(
response.get("prompt_eval_count") or 0,
response.get("eval_count") or 0,
)
elif provider == "open_router":
client = openai.OpenAI(
base_url="https://openrouter.ai/api/v1",
api_key=credentials.api_key.get_secret_value(),
)

response = client.chat.completions.create(
extra_headers={
"HTTP-Referer": "https://agpt.co",
"X-Title": "AutoGPT",
},
model=llm_model.value,
messages=prompt, # type: ignore
max_tokens=max_tokens,
)

# If there's no response, raise an error
if not response.choices:
if response:
raise ValueError(f"OpenRouter error: {response}")
else:
raise ValueError("No response from OpenRouter.")

return (
response.choices[0].message.content or "",
response.usage.prompt_tokens if response.usage else 0,
response.usage.completion_tokens if response.usage else 0,
)
else:
raise ValueError(f"Unsupported LLM provider: {provider}")

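OpenRouter exposes an OpenAI-compatible API, which is why the new branch can reuse the openai client with only a base_url override. A minimal standalone sketch of the same call (the API key and model are placeholders):

import openai

client = openai.OpenAI(
    base_url="https://openrouter.ai/api/v1",
    api_key="sk-or-...",  # placeholder Open Router key
)

response = client.chat.completions.create(
    # Optional headers OpenRouter uses for app attribution.
    extra_headers={"HTTP-Referer": "https://agpt.co", "X-Title": "AutoGPT"},
    model="x-ai/grok-beta",
    messages=[{"role": "user", "content": "Say hello in one sentence."}],
    max_tokens=64,
)

print(response.choices[0].message.content)
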
27 changes: 27 additions & 0 deletions autogpt_platform/backend/backend/data/block_cost_config.py
@@ -6,6 +6,7 @@
groq_credentials,
ideogram_credentials,
jina_credentials,
open_router_credentials,
openai_credentials,
replicate_credentials,
revid_credentials,
@@ -54,6 +55,15 @@
LlmModel.LLAMA3_1_8B: 1,
LlmModel.OLLAMA_LLAMA3_8B: 1,
LlmModel.OLLAMA_LLAMA3_405B: 1,
LlmModel.GEMINI_FLASH_1_5_8B: 1,
LlmModel.GEMINI_FLASH_1_5_EXP: 1,
LlmModel.GROK_BETA: 5,
LlmModel.MISTRAL_NEMO: 1,
LlmModel.COHERE_COMMAND_R_08_2024: 1,
LlmModel.COHERE_COMMAND_R_PLUS_08_2024: 3,
LlmModel.EVA_QWEN_2_5_32B: 1,
LlmModel.DEEPSEEK_CHAT: 2,
LlmModel.PERPLEXITY_LLAMA_3_1_SONAR_LARGE_128K_ONLINE: 1,
}

for model in LlmModel:
@@ -124,6 +134,23 @@
cost_filter={"api_key": None},
),
]
# Open Router Models
+ [
BlockCost(
cost_type=BlockCostType.RUN,
cost_filter={
"model": model,
"credentials": {
"id": open_router_credentials.id,
"provider": open_router_credentials.provider,
"type": open_router_credentials.type,
},
},
cost_amount=cost,
)
for model, cost in MODEL_COST.items()
if MODEL_METADATA[model].provider == "open_router"
]
)

# =============== This is the exhaustive list of cost for each Block =============== #
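For a single model, the comprehension above expands to an entry like the following sketch (it reuses the names already imported in this config file; the "api_key" credential type is inferred from the APIKeyCredentials defaults and is not shown in this diff):

BlockCost(
    cost_type=BlockCostType.RUN,
    cost_filter={
        "model": LlmModel.GROK_BETA,
        "credentials": {
            "id": "b5a0e27d-0c98-4df3-a4b9-10193e1f3c40",  # open_router_credentials.id
            "provider": "open_router",                      # open_router_credentials.provider
            "type": "api_key",                              # open_router_credentials.type (assumed)
        },
    },
    cost_amount=5,  # MODEL_COST[LlmModel.GROK_BETA]
)
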
4 changes: 1 addition & 3 deletions autogpt_platform/backend/backend/data/user.py
@@ -1,6 +1,7 @@
import logging
from typing import Optional, cast

from autogpt_libs.auth.models import DEFAULT_USER_ID
from autogpt_libs.supabase_integration_credentials_store.types import (
UserIntegrations,
UserMetadata,
@@ -15,9 +16,6 @@

logger = logging.getLogger(__name__)

DEFAULT_USER_ID = "3e53486c-cf57-477e-ba2a-cb02dc828e1a"
DEFAULT_EMAIL = "[email protected]"


async def get_or_create_user(user_data: dict) -> User:
user_id = user_data.get("sub")
3 changes: 2 additions & 1 deletion autogpt_platform/backend/backend/server/routers/v1.py
@@ -124,7 +124,8 @@ def execute_graph_block(block_id: str, data: BlockInput) -> CompletedBlockOutput
async def get_user_credits(
user_id: Annotated[str, Depends(get_user_id)]
) -> dict[str, int]:
return {"credits": await _user_credit_model.get_or_refill_credit(user_id)}
# Credits can go negative, so ensure it's at least 0 for user to see.
return {"credits": max(await _user_credit_model.get_or_refill_credit(user_id), 0)}


########################################################
17 changes: 5 additions & 12 deletions autogpt_platform/backend/backend/server/utils.py
@@ -1,18 +1,11 @@
from autogpt_libs.auth.middleware import auth_middleware
from fastapi import Depends, HTTPException
from autogpt_libs.auth.depends import requires_user
from autogpt_libs.auth.models import User
from fastapi import Depends

from backend.data.user import DEFAULT_USER_ID
from backend.util.settings import Settings

settings = Settings()


def get_user_id(payload: dict = Depends(auth_middleware)) -> str:
if not payload:
# This handles the case when authentication is disabled
return DEFAULT_USER_ID

user_id = payload.get("sub")
if not user_id:
raise HTTPException(status_code=401, detail="User ID not found in token")
return user_id
def get_user_id(user: User = Depends(requires_user)) -> str:
return user.user_id
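
Route handlers no longer need to handle the auth-disabled case themselves: get_user_id now delegates to requires_user, which already falls back to the default user. A usage sketch of the dependency (the route path and app instance are illustrative):

from typing import Annotated

from fastapi import Depends, FastAPI

from backend.server.utils import get_user_id

app = FastAPI()


@app.get("/api/example/whoami")
async def whoami(user_id: Annotated[str, Depends(get_user_id)]) -> dict[str, str]:
    # user_id is the JWT "sub" claim, or DEFAULT_USER_ID when auth is disabled.
    return {"user_id": user_id}
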
32 changes: 16 additions & 16 deletions autogpt_platform/backend/backend/server/ws_api.py
@@ -53,24 +53,24 @@ async def event_broadcaster(manager: ConnectionManager):


async def authenticate_websocket(websocket: WebSocket) -> str:
if settings.config.enable_auth:
token = websocket.query_params.get("token")
if not token:
await websocket.close(code=4001, reason="Missing authentication token")
return ""
if not settings.config.enable_auth:
return DEFAULT_USER_ID

token = websocket.query_params.get("token")
if not token:
await websocket.close(code=4001, reason="Missing authentication token")
return ""

try:
payload = parse_jwt_token(token)
user_id = payload.get("sub")
if not user_id:
await websocket.close(code=4002, reason="Invalid token")
return ""
return user_id
except ValueError:
await websocket.close(code=4003, reason="Invalid token")
try:
payload = parse_jwt_token(token)
user_id = payload.get("sub")
if not user_id:
await websocket.close(code=4002, reason="Invalid token")
return ""
else:
return DEFAULT_USER_ID
return user_id
except ValueError:
await websocket.close(code=4003, reason="Invalid token")
return ""


async def handle_subscribe(
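The websocket endpoint still authenticates via a token query parameter; a hedged client-side sketch of connecting with one (the URL and the websockets library are assumptions, not part of this diff):

import asyncio

import websockets  # assumed client library; the backend itself does not use it


async def connect(jwt_token: str) -> None:
    # authenticate_websocket reads the JWT from the "token" query parameter and
    # closes the socket with code 4001/4002/4003 when it is missing or invalid.
    url = f"ws://localhost:8001/ws?token={jwt_token}"  # host, port and path are placeholders
    async with websockets.connect(url) as ws:
        print("connected")
        await ws.close()


# asyncio.run(connect("<jwt issued by the platform's auth flow>"))
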
1 change: 1 addition & 0 deletions autogpt_platform/backend/backend/util/settings.py
@@ -238,6 +238,7 @@ class Secrets(UpdateTrackingModel["Secrets"], BaseSettings):
openai_api_key: str = Field(default="", description="OpenAI API key")
anthropic_api_key: str = Field(default="", description="Anthropic API key")
groq_api_key: str = Field(default="", description="Groq API key")
open_router_api_key: str = Field(default="", description="Open Router API Key")

reddit_client_id: str = Field(default="", description="Reddit client ID")
reddit_client_secret: str = Field(default="", description="Reddit client secret")
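Like the other provider keys, the new field is populated from the OPEN_ROUTER_API_KEY environment variable added to .env.example. A small standalone sketch of the same pydantic-settings pattern (not the platform's actual Secrets class):

from pydantic import Field
from pydantic_settings import BaseSettings


class SecretsSketch(BaseSettings):
    # pydantic-settings matches fields to environment variables case-insensitively,
    # so OPEN_ROUTER_API_KEY populates open_router_api_key here.
    open_router_api_key: str = Field(default="", description="Open Router API Key")


print(bool(SecretsSketch().open_router_api_key))  # False unless the env var is set
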
1 change: 1 addition & 0 deletions autogpt_platform/frontend/src/app/profile/page.tsx
@@ -73,6 +73,7 @@ export default function PrivatePage() {
"7f7b0654-c36b-4565-8fa7-9a52575dfae2", // D-ID
"7f26de70-ba0d-494e-ba76-238e65e7b45f", // Jina
"66f20754-1b81-48e4-91d0-f4f0dd82145f", // Unreal Speech
"b5a0e27d-0c98-4df3-a4b9-10193e1f3c40", // Open Router
],
[],
);
@@ -205,7 +205,10 @@ export const BlocksControl: React.FC<BlocksControlProps> = ({
{beautifyString(block.name).replace(/ Block$/, "")}
</span>
<span className="block break-words text-xs font-normal text-gray-500">
{block.description}
{/* Cap description at 100 characters max */}
{block.description?.length > 100
? block.description.slice(0, 100) + "..."
: block.description}
</span>
</div>
<div
@@ -60,6 +60,7 @@ export const providerIcons: Record<
ollama: fallbackIcon,
openai: fallbackIcon,
openweathermap: fallbackIcon,
open_router: fallbackIcon,
pinecone: fallbackIcon,
replicate: fallbackIcon,
revid: fallbackIcon,
@@ -34,6 +34,7 @@ const providerDisplayNames: Record<CredentialsProviderName, string> = {
ollama: "Ollama",
openai: "OpenAI",
openweathermap: "OpenWeatherMap",
open_router: "Open Router",
pinecone: "Pinecone",
replicate: "Replicate",
revid: "Rev.ID",
7 changes: 4 additions & 3 deletions autogpt_platform/frontend/src/components/monitor/FlowInfo.tsx
@@ -105,10 +105,11 @@ export const FlowInfo: React.FC<
</DropdownMenu>
)}
<Link
className={buttonVariants({ variant: "outline" })}
className={buttonVariants({ variant: "default" })}
href={`/build?flowID=${flow.id}`}
>
<Pencil2Icon />
<Pencil2Icon className="mr-2" />
Open in Builder
</Link>
<Button
variant="outline"
@@ -126,7 +127,7 @@
)
}
>
<ExitIcon />
<ExitIcon className="mr-2" /> Export
</Button>
<Button variant="outline" onClick={() => setIsDeleteModalOpen(true)}>
<Trash2Icon className="h-full" />
@@ -47,10 +47,10 @@ export const FlowRunInfo: React.FC<
</Button>
)}
<Link
className={buttonVariants({ variant: "outline" })}
className={buttonVariants({ variant: "default" })}
href={`/build?flowID=${flow.id}`}
>
<Pencil2Icon className="mr-2" /> Edit Agent
<Pencil2Icon className="mr-2" /> Open in Builder
</Link>
</div>
</CardHeader>
@@ -112,6 +112,7 @@ export const PROVIDER_NAMES = {
OLLAMA: "ollama",
OPENAI: "openai",
OPENWEATHERMAP: "openweathermap",
OPEN_ROUTER: "open_router",
PINECONE: "pinecone",
REPLICATE: "replicate",
REVID: "revid",
