Temperature #3310

Merged · 7 commits · Dec 3, 2024

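In brief: the backend LLM factory gains an optional per-request temperature (falling back to GEN_AI_TEMPERATURE when unset), the frontend replaces per-component debounced temperature state with a single updateTemperature on the LLM override manager, and temperature is capped at 1.0 for Anthropic/Claude models.
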
5 changes: 4 additions & 1 deletion backend/danswer/llm/factory.py
@@ -71,6 +71,7 @@ def _create_llm(model: str) -> LLM:
api_base=llm_provider.api_base,
api_version=llm_provider.api_version,
custom_config=llm_provider.custom_config,
temperature=temperature_override,
additional_headers=additional_headers,
long_term_logger=long_term_logger,
)
@@ -128,11 +129,13 @@ def get_llm(
api_base: str | None = None,
api_version: str | None = None,
custom_config: dict[str, str] | None = None,
temperature: float = GEN_AI_TEMPERATURE,
temperature: float | None = None,
timeout: int = QA_TIMEOUT,
additional_headers: dict[str, str] | None = None,
long_term_logger: LongTermLogger | None = None,
) -> LLM:
if temperature is None:
temperature = GEN_AI_TEMPERATURE
return DefaultMultiLLM(
model_provider=provider,
model_name=model,
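Net effect on the backend: get_llm's temperature parameter is now optional and only falls back to GEN_AI_TEMPERATURE when no value is supplied, so the per-request temperature_override seen above in _create_llm flows through unchanged.
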
3 changes: 3 additions & 0 deletions web/src/app/admin/configuration/llm/interfaces.ts
@@ -89,3 +89,6 @@ export const getProviderIcon = (providerName: string, modelName?: string) => {
return CPUIcon;
}
};

export const isAnthropic = (provider: string, modelName: string) =>
provider === "anthropic" || modelName.toLowerCase().includes("claude");
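Context for the new helper (not stated in the diff, but a known API constraint): Anthropic's API only accepts temperature values in [0, 1], while the temperature slider here goes up to 2 (OpenAI's range). A minimal sketch of the clamping pattern this enables; clampTemperature is a hypothetical name for illustration, not part of the PR:

```ts
import { isAnthropic } from "@/app/admin/configuration/llm/interfaces";

// Hypothetical helper (illustration only): cap the requested temperature
// at 1.0 for Anthropic/Claude models; other providers pass through.
const clampTemperature = (provider: string, modelName: string, t: number) =>
  isAnthropic(provider, modelName) ? Math.min(t, 1.0) : t;

clampTemperature("anthropic", "claude-3-opus", 1.8); // => 1.0
clampTemperature("openai", "gpt-4o", 1.8); // => 1.8
```
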
2 changes: 1 addition & 1 deletion web/src/app/chat/ChatPage.tsx
@@ -411,7 +411,7 @@ export function ChatPage({

// reset LLM overrides (based on chat session!)
llmOverrideManager.updateModelOverrideForChatSession(selectedChatSession);
llmOverrideManager.setTemperature(null);
llmOverrideManager.updateTemperature(null);

// remove uploaded files
setCurrentMessageFiles([]);
1 change: 0 additions & 1 deletion web/src/app/chat/RegenerateOption.tsx
@@ -14,7 +14,6 @@ import { destructureValue, getFinalLLM, structureValue } from "@/lib/llm/utils";
import { useState } from "react";
import { Hoverable } from "@/components/Hoverable";
import { Popover } from "@/components/popover/Popover";
import { StarFeedback } from "@/components/icons/icons";
import { IconType } from "react-icons";
import { FiRefreshCw } from "react-icons/fi";

30 changes: 7 additions & 23 deletions web/src/app/chat/modal/configuration/LlmTab.tsx
@@ -35,25 +35,9 @@ export const LlmTab = forwardRef<HTMLDivElement, LlmTabProps>(
checkPersonaRequiresImageGeneration(currentAssistant);

const { llmProviders } = useChatContext();
const { setLlmOverride, temperature, setTemperature } = llmOverrideManager;
const { setLlmOverride, temperature, updateTemperature } =
llmOverrideManager;
const [isTemperatureExpanded, setIsTemperatureExpanded] = useState(false);
const [localTemperature, setLocalTemperature] = useState<number>(
temperature || 0
);
const debouncedSetTemperature = useCallback(
(value: number) => {
const debouncedFunction = debounce((value: number) => {
setTemperature(value);
}, 300);
return debouncedFunction(value);
},
[setTemperature]
);

const handleTemperatureChange = (value: number) => {
setLocalTemperature(value);
debouncedSetTemperature(value);
};

return (
<div className="w-full">
@@ -108,26 +92,26 @@ export const LlmTab = forwardRef<HTMLDivElement, LlmTabProps>(
<input
type="range"
onChange={(e) =>
handleTemperatureChange(parseFloat(e.target.value))
updateTemperature(parseFloat(e.target.value))
}
className="w-full p-2 border border-border rounded-md"
min="0"
max="2"
step="0.01"
value={localTemperature}
value={temperature || 0}
/>
<div
className="absolute text-sm"
style={{
left: `${(localTemperature || 0) * 50}%`,
left: `${(temperature || 0) * 50}%`,
transform: `translateX(-${Math.min(
Math.max((localTemperature || 0) * 50, 10),
Math.max((temperature || 0) * 50, 10),
90
)}%)`,
top: "-1.5rem",
}}
>
{localTemperature}
{temperature}
</div>
</div>
</>
42 changes: 14 additions & 28 deletions web/src/components/chat_search/AssistantSelector.tsx
@@ -46,19 +46,17 @@ const AssistantSelector = ({
liveAssistant: Persona;
onAssistantChange: (assistant: Persona) => void;
chatSessionId?: string;
llmOverrideManager?: LlmOverrideManager;
llmOverrideManager: LlmOverrideManager;
isMobile: boolean;
}) => {
const { finalAssistants } = useAssistants();
const [isOpen, setIsOpen] = useState(false);
const dropdownRef = useRef<HTMLDivElement>(null);
const { llmProviders } = useChatContext();
const { user } = useUser();

const [assistants, setAssistants] = useState<Persona[]>(finalAssistants);
const [isTemperatureExpanded, setIsTemperatureExpanded] = useState(false);
const [localTemperature, setLocalTemperature] = useState<number>(
llmOverrideManager?.temperature || 0
);

// Initialize selectedTab from localStorage
const [selectedTab, setSelectedTab] = useState<number>(() => {
@@ -92,21 +90,6 @@
}
};

const debouncedSetTemperature = useCallback(
(value: number) => {
const debouncedFunction = debounce((value: number) => {
llmOverrideManager?.setTemperature(value);
}, 300);
return debouncedFunction(value);
},
[llmOverrideManager]
);

const handleTemperatureChange = (value: number) => {
setLocalTemperature(value);
debouncedSetTemperature(value);
};

// Handle tab change and update localStorage
const handleTabChange = (index: number) => {
setSelectedTab(index);
@@ -119,7 +102,7 @@
const [_, currentLlm] = getFinalLLM(
llmProviders,
liveAssistant,
llmOverrideManager?.llmOverride ?? null
llmOverrideManager.llmOverride ?? null
);

const requiresImageGeneration =
@@ -204,19 +187,17 @@
llmProviders={llmProviders}
currentLlm={currentLlm}
userDefault={userDefaultModel}
includeUserDefault={true}
onSelect={(value: string | null) => {
if (value == null) return;
const { modelName, name, provider } = destructureValue(value);
llmOverrideManager?.setLlmOverride({
llmOverrideManager.setLlmOverride({
name,
provider,
modelName,
});
if (chatSessionId) {
updateModelOverrideForChatSession(chatSessionId, value);
}
setIsOpen(false);
}}
/>
<div className="mt-4">
@@ -243,26 +224,31 @@
<input
type="range"
onChange={(e) =>
handleTemperatureChange(parseFloat(e.target.value))
llmOverrideManager.updateTemperature(
parseFloat(e.target.value)
)
}
className="w-full p-2 border border-border rounded-md"
min="0"
max="2"
step="0.01"
value={localTemperature}
value={llmOverrideManager.temperature?.toString() || "0"}
/>
<div
className="absolute text-sm"
style={{
left: `${(localTemperature || 0) * 50}%`,
left: `${(llmOverrideManager.temperature || 0) * 50}%`,
transform: `translateX(-${Math.min(
Math.max((localTemperature || 0) * 50, 10),
Math.max(
(llmOverrideManager.temperature || 0) * 50,
10
),
90
)}%)`,
top: "-1.5rem",
}}
>
{localTemperature}
{llmOverrideManager.temperature}
</div>
</div>
</>
2 changes: 0 additions & 2 deletions web/src/components/llm/LLMList.tsx
@@ -19,7 +19,6 @@ interface LlmListProps {
scrollable?: boolean;
hideProviderIcon?: boolean;
requiresImageGeneration?: boolean;
includeUserDefault?: boolean;
currentAssistant?: Persona;
}

@@ -31,7 +30,6 @@ export const LlmList: React.FC<LlmListProps> = ({
userDefault,
scrollable,
requiresImageGeneration,
includeUserDefault = false,
}) => {
const llmOptionsByProvider: {
[provider: string]: {
24 changes: 21 additions & 3 deletions web/src/lib/hooks.ts
@@ -16,6 +16,7 @@ import { UsersResponse } from "./users/interfaces";
import { Credential } from "./connectors/credentials";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import { PersonaCategory } from "@/app/admin/assistants/interfaces";
import { isAnthropic } from "@/app/admin/configuration/llm/interfaces";

const CREDENTIAL_URL = "/api/manage/admin/credential";

@@ -71,7 +72,9 @@ export const useConnectorCredentialIndexingStatus = (
getEditable = false
) => {
const { mutate } = useSWRConfig();
const url = `${INDEXING_STATUS_URL}${getEditable ? "?get_editable=true" : ""}`;
const url = `${INDEXING_STATUS_URL}${
getEditable ? "?get_editable=true" : ""
}`;
const swrResponse = useSWR<ConnectorIndexingStatus<any, any>[]>(
url,
errorHandlingFetcher,
@@ -157,7 +160,7 @@ export interface LlmOverrideManager {
globalDefault: LlmOverride;
setGlobalDefault: React.Dispatch<React.SetStateAction<LlmOverride>>;
temperature: number | null;
setTemperature: React.Dispatch<React.SetStateAction<number | null>>;
updateTemperature: (temperature: number | null) => void;
updateModelOverrideForChatSession: (chatSession?: ChatSession) => void;
}
export function useLlmOverride(
@@ -212,16 +215,31 @@
setTemperature(defaultTemperature !== undefined ? defaultTemperature : 0);
}, [defaultTemperature]);

useEffect(() => {
if (isAnthropic(llmOverride.provider, llmOverride.modelName)) {
setTemperature((prevTemp) => Math.min(prevTemp ?? 0, 1.0));
}
}, [llmOverride]);

const updateTemperature = (temperature: number | null) => {
if (isAnthropic(llmOverride.provider, llmOverride.modelName)) {
setTemperature((prevTemp) => Math.min(temperature ?? 0, 1.0));
} else {
setTemperature(temperature);
}
};

return {
updateModelOverrideForChatSession,
llmOverride,
setLlmOverride,
globalDefault,
setGlobalDefault,
temperature,
setTemperature,
updateTemperature,
};
}

/*
EE Only APIs
*/
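Taken together with the component changes above, the hook now owns temperature state end to end: updateTemperature replaces the per-component localTemperature state plus debounce, and applies the Anthropic cap in one place. A minimal consumer sketch under those assumptions (TemperatureSlider is a hypothetical component, not part of this PR):

```tsx
import { LlmOverrideManager } from "@/lib/hooks";

// Hypothetical slider (illustration only): binds directly to the shared
// temperature and writes through updateTemperature, so the 1.0 cap for
// Anthropic models is enforced by the hook rather than by each component.
const TemperatureSlider = ({
  llmOverrideManager,
}: {
  llmOverrideManager: LlmOverrideManager;
}) => (
  <input
    type="range"
    min="0"
    max="2"
    step="0.01"
    value={llmOverrideManager.temperature ?? 0}
    onChange={(e) =>
      llmOverrideManager.updateTemperature(parseFloat(e.target.value))
    }
  />
);

export default TemperatureSlider;
```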