Skip to content

Commit

Permalink
Overheat LLMs
Browse files Browse the repository at this point in the history
OpenAI LLMs support temperature values of up to 2.
We don't enable >1 by default, but we have a new labs setting
to enable 'overheating' (max temperature raised
from 1 to 2) for Really Well Done LLMs.
  • Loading branch information
enricoros committed Dec 5, 2023
1 parent 021fa3b commit a41667f
Show file tree
Hide file tree
Showing 4 changed files with 22 additions and 5 deletions.
10 changes: 8 additions & 2 deletions src/apps/settings-modal/UxLabsSettings.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import * as React from 'react';

import { FormControl, Typography } from '@mui/joy';
import CallIcon from '@mui/icons-material/Call';
import DeviceThermostatIcon from '@mui/icons-material/DeviceThermostat';
import FormatPaintIcon from '@mui/icons-material/FormatPaint';
import VerticalSplitIcon from '@mui/icons-material/VerticalSplit';
import YouTubeIcon from '@mui/icons-material/YouTube';
Expand All @@ -16,8 +17,8 @@ export function UxLabsSettings() {

// external state
const {
labsCalling, /*labsEnhancedUI,*/ labsMagicDraw, labsPersonaYTCreator, labsSplitBranching,
setLabsCalling, /*setLabsEnhancedUI,*/ setLabsMagicDraw, setLabsPersonaYTCreator, setLabsSplitBranching,
labsCalling, /*labsEnhancedUI,*/ labsLlmOvertemp, labsMagicDraw, labsPersonaYTCreator, labsSplitBranching,
setLabsCalling, /*setLabsEnhancedUI,*/ setLabsLlmOvertemp, setLabsMagicDraw, setLabsPersonaYTCreator, setLabsSplitBranching,
} = useUXLabsStore();

return <>
Expand All @@ -42,6 +43,11 @@ export function UxLabsSettings() {
checked={labsSplitBranching} onChange={setLabsSplitBranching}
/>

<FormSwitchControl
title={<><DeviceThermostatIcon sx={{ mr: 0.25 }} /> Overtemperature</>} description={labsLlmOvertemp ? 'LLMs ♨️' : 'Disabled'}
checked={labsLlmOvertemp} onChange={setLabsLlmOvertemp}
/>

{/*<FormSwitchControl*/}
{/* title='Enhanced UI' description={labsEnhancedUI ? 'Enabled' : 'Disabled'}*/}
{/* checked={labsEnhancedUI} onChange={setLabsEnhancedUI}*/}
Expand Down
6 changes: 6 additions & 0 deletions src/common/state/store-ux-labs.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,9 @@ interface UXLabsStore {
labsSplitBranching: boolean;
setLabsSplitBranching: (labsSplitBranching: boolean) => void;

labsLlmOvertemp: boolean;
setLabsLlmOvertemp: (labsLlmOvertemp: boolean) => void;

}

export const useUXLabsStore = create<UXLabsStore>()(
Expand All @@ -48,6 +51,9 @@ export const useUXLabsStore = create<UXLabsStore>()(
labsSplitBranching: false,
setLabsSplitBranching: (labsSplitBranching: boolean) => set({ labsSplitBranching }),

labsLlmOvertemp: false,
setLabsLlmOvertemp: (labsLlmOvertemp: boolean) => set({ labsLlmOvertemp }),

}),
{
name: 'app-ux-labs',
Expand Down
2 changes: 1 addition & 1 deletion src/modules/llms/transports/server/openai/openai.router.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ export type OpenAIAccessSchema = z.infer<typeof openAIAccessSchema>;

/**
 * Zod schema for the per-model options sent to the OpenAI transport.
 * NOTE(review): the diff fragment showed both the pre-change (max 1) and
 * post-change (max 2) `temperature` lines; this is the committed version.
 */
export const openAIModelSchema = z.object({
  id: z.string(),
  // OpenAI accepts temperature in [0, 2]; values > 1 ('overheating') are
  // gated in the UI behind the labsLlmOvertemp setting, but the server
  // schema must accept the full range.
  temperature: z.number().min(0).max(2).optional(),
  maxTokens: z.number().min(1).max(1000000),
});
export type OpenAIModelSchema = z.infer<typeof openAIModelSchema>;
Expand Down
9 changes: 7 additions & 2 deletions src/modules/llms/vendors/openai/OpenAILLMOptions.tsx
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
import * as React from 'react';

import { FormSliderControl } from '~/common/components/forms/FormSliderControl';
import { useUXLabsStore } from '~/common/state/store-ux-labs';

import { DLLM, useModelsStore } from '../../store-llms';
import { LLMOptionsOpenAI } from './openai.vendor';
Expand All @@ -18,15 +19,19 @@ function normalizeOpenAIOptions(partialOptions?: Partial<LLMOptionsOpenAI>) {

export function OpenAILLMOptions(props: { llm: DLLM<unknown, LLMOptionsOpenAI> }) {

// external state
const labsLlmOvertemp = useUXLabsStore.getState().labsLlmOvertemp;

// derived state
const { id: llmId, maxOutputTokens, options } = props.llm;
const { llmResponseTokens, llmTemperature } = normalizeOpenAIOptions(options);

return <>

<FormSliderControl
title='Temperature' ariaLabel='Model Temperature'
description={llmTemperature < 0.33 ? 'More strict' : llmTemperature > 0.67 ? 'Larger freedom' : 'Creativity'}
min={0} max={1} step={0.1} defaultValue={0.5}
description={llmTemperature < 0.33 ? 'More strict' : llmTemperature > 1 ? 'Extra hot ♨️' : llmTemperature > 0.67 ? 'Larger freedom' : 'Creativity'}
min={0} max={labsLlmOvertemp ? 2 : 1} step={0.1} defaultValue={0.5}
valueLabelDisplay='on'
value={llmTemperature}
onChange={value => useModelsStore.getState().updateLLMOptions(llmId, { llmTemperature: value })}
Expand Down

1 comment on commit a41667f

@enricoros
Copy link
Owner Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Closes #256

Please sign in to comment.