Skip to content

Commit

Permalink
Persona Creation: use cancelable streaming. Fixes #316, #328.
Browse files Browse the repository at this point in the history
  • Loading branch information
enricoros committed Jan 9, 2024
1 parent 91b770d commit e2e7ea9
Show file tree
Hide file tree
Showing 2 changed files with 22 additions and 7 deletions.
4 changes: 4 additions & 0 deletions src/apps/personas/PersonaCreator.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,7 @@ export function PersonaCreator() {
chainProgress,
chainIntermediates,
chainStepName,
chainStepInterimChars,
chainOutput,
chainError,
abortChain,
Expand Down Expand Up @@ -214,6 +215,9 @@ export function PersonaCreator() {
{chainStepName}
</Typography>
<LinearProgress color='success' determinate value={Math.max(10, 100 * chainProgress)} sx={{ mt: 1.5 }} />
<Typography level='title-sm' sx={{ mt: 1 }}>
{chainStepInterimChars === null ? 'Loading ...' : 'Generating ...'}
</Typography>
</Box>
<Typography level='title-sm'>
This may take 1-2 minutes. Do not close this window or the progress will be lost.
Expand Down
25 changes: 18 additions & 7 deletions src/apps/personas/useLLMChain.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import * as React from 'react';

import { DLLMId, useModelsStore } from '~/modules/llms/store-llms';
import { llmChatGenerateOrThrow, VChatMessageIn } from '~/modules/llms/llm.client';
import { llmStreamingChatGenerate, VChatMessageIn } from '~/modules/llms/llm.client';


export interface LLMChainStep {
Expand All @@ -21,6 +21,7 @@ export function useLLMChain(steps: LLMChainStep[], llmId: DLLMId | undefined, ch
// state
const [chain, setChain] = React.useState<ChainState | null>(null);
const [error, setError] = React.useState<string | null>(null);
const [chainStepInterimText, setChainStepInterimText] = React.useState<string | null>(null);
const chainAbortController = React.useRef(new AbortController());

// restart Chain on inputs change
Expand Down Expand Up @@ -81,21 +82,30 @@ export function useLLMChain(steps: LLMChainStep[], llmId: DLLMId | undefined, ch
const globalToStepListener = () => stepAbortController.abort('chain aborted');
_chainAbortController.signal.addEventListener('abort', globalToStepListener);

// LLM call
llmChatGenerateOrThrow(llmId, llmChatInput, null, null, chain.overrideResponseTokens ?? undefined)
.then(({ content }) => {
stepDone = true;
// interim text
let interimText = '';
setChainStepInterimText(null);

// LLM call (streaming, cancelable)
llmStreamingChatGenerate(llmId, llmChatInput, null, null, stepAbortController.signal,
(update) => {
update.text && setChainStepInterimText(interimText = update.text);
})
.then(() => {
if (stepAbortController.signal.aborted)
return;
const chainState = updateChainState(chain, llmChatInput, stepIdx, content);
const chainState = updateChainState(chain, llmChatInput, stepIdx, interimText);
if (chainState.output && onSuccess)
onSuccess(chainState.output);
setChain(chainState);
})
.catch((err) => {
stepDone = true;
if (!stepAbortController.signal.aborted)
setError(`Transformation error: ${err?.message || err?.toString() || err || 'unknown'}`);
})
.finally(() => {
stepDone = true;
setChainStepInterimText(null);
});

// abort if unmounted before the LLM call ends, or if the full chain has been aborted
Expand All @@ -113,6 +123,7 @@ export function useLLMChain(steps: LLMChainStep[], llmId: DLLMId | undefined, ch
chainOutput: chain?.output ?? null,
chainProgress: chain?.progress ?? 0,
chainStepName: chain?.steps?.find((step) => !step.isComplete)?.ref.name ?? null,
chainStepInterimChars: chainStepInterimText?.length ?? null,
chainIntermediates: chain?.steps?.map((step) => step.output ?? null)?.filter(out => out) ?? [],
chainError: error,
abortChain: () => {
Expand Down

0 comments on commit e2e7ea9

Please sign in to comment.