From 0edeade7b4f20422f594f8c10eeb3a3144794213 Mon Sep 17 00:00:00 2001
From: Yulong Ruan
Date: Tue, 4 Jun 2024 17:49:33 +0800
Subject: [PATCH] refactor to expose text2vega object to window object as a temporary solution

Signed-off-by: Yulong Ruan
---
 .../public/components/vega_vis_editor.tsx  |  17 +-
 src/plugins/vis_type_vega/public/plugin.ts |   7 +-
 .../vis_type_vega/public/text_to_vega.ts   | 159 ++++++++----------
 .../components/visualize_top_nav.tsx       |  23 ++-
 4 files changed, 95 insertions(+), 111 deletions(-)

diff --git a/src/plugins/vis_type_vega/public/components/vega_vis_editor.tsx b/src/plugins/vis_type_vega/public/components/vega_vis_editor.tsx
index 98332bd6305f..23c6d2975f3c 100644
--- a/src/plugins/vis_type_vega/public/components/vega_vis_editor.tsx
+++ b/src/plugins/vis_type_vega/public/components/vega_vis_editor.tsx
@@ -95,19 +95,16 @@ function VegaVisEditor({ stateParams, setValue }: VisOptionsProps<VisParams>) {

   useEffect(() => {
     const text2vega = getText2Vega();
-    text2vega.getVega$().subscribe((v) => {
-      if (!(v instanceof Error)) {
-        setValue('spec', JSON.stringify(v, null, 4));
+    const subscription = text2vega.getResult$().subscribe((result) => {
+      if (result) {
+        setValue('spec', JSON.stringify(result, null, 4));
       }
     });
-    if (window['input$']) {
-      window['input$'].subscribe((v: string) => {
-        text2vega.updateInput(v);
-      });
-    }
-    // text2vega.updateInput('find unique visitors and average bytes every 3 hours');
-  }, []);
+    return () => {
+      subscription.unsubscribe();
+    };
+  }, [setValue]);

   return (
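Note on the subscription above: getResult$() (defined in text_to_vega.ts below) emits either a generated Vega-Lite object or, when any step of the generation pipeline fails, a plain { error } object produced by catchError. The truthiness check therefore also lets error objects through to setValue. A minimal sketch of a more defensive callback, reusing this component's getText2Vega accessor and setValue setter, so it is illustrative rather than standalone:

    // Sketch only: branch on the { error } shape instead of plain truthiness.
    const subscription = getText2Vega()
      .getResult$()
      .subscribe((result: any) => {
        if (result && !result.error) {
          setValue('spec', JSON.stringify(result, null, 4));
        }
        // result.error could be surfaced to the user here instead of being ignored.
      });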
diff --git a/src/plugins/vis_type_vega/public/plugin.ts b/src/plugins/vis_type_vega/public/plugin.ts
index 006dea6759c9..75285031414b 100644
--- a/src/plugins/vis_type_vega/public/plugin.ts
+++ b/src/plugins/vis_type_vega/public/plugin.ts
@@ -133,6 +133,11 @@ export class VegaPlugin implements Plugin<Promise<void>, void> {
     setSavedObjectsClient(core.savedObjects);
     setUiActions(uiActions);
     setInjectedMetadata(core.injectedMetadata);
-    setText2Vega(new Text2Vega(core.http));
+
+    const text2vega = new Text2Vega(core.http);
+    setText2Vega(text2vega);
+    // quick hack to share llm execution context among plugins
+    // TODO: refactor this to not use window to share context
+    (window as any).llm = { text2vega };
   }
 }
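Because the Text2Vega instance is shared through a window global rather than through the plugin's setup/start contract, a consuming plugin has to reach for (window as any).llm and should tolerate it being absent, for example when vis_type_vega is disabled or its setup has not run yet. A rough consumer-side sketch under those assumptions, using the sample question from the comment removed in vega_vis_editor.tsx above:

    // Sketch only: guarded access to the window-scoped bridge assigned in setup() above.
    const text2vega = (window as any).llm?.text2vega;
    if (text2vega) {
      text2vega.invoke({ input: 'find unique visitors and average bytes every 3 hours' });
    }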
diff --git a/src/plugins/vis_type_vega/public/text_to_vega.ts b/src/plugins/vis_type_vega/public/text_to_vega.ts
index 2c5905ca7b18..f217b1bd6c38 100644
--- a/src/plugins/vis_type_vega/public/text_to_vega.ts
+++ b/src/plugins/vis_type_vega/public/text_to_vega.ts
@@ -1,12 +1,5 @@
-import { BehaviorSubject, Observable } from 'rxjs';
-import {
-  debounceTime,
-  distinctUntilChanged,
-  switchMap,
-  tap,
-  filter,
-  finalize,
-} from 'rxjs/operators';
+import { BehaviorSubject, Observable, of } from 'rxjs';
+import { debounceTime, switchMap, tap, filter, catchError } from 'rxjs/operators';
 import { HttpSetup } from 'opensearch-dashboards/public';

 const topN = (ppl: string, n: number) => `${ppl} | head ${n}`;
@@ -35,103 +28,87 @@ Just reply with the json based Vega-Lite object, do not include any other conten
 `;
 };

-const llmRunning$ = new BehaviorSubject(false);
-// @ts-ignore
-window['llmRunning$'] = llmRunning$;
 export class Text2Vega {
-  input$: BehaviorSubject;
-  vega$: Observable;
+  input$ = new BehaviorSubject({ input: '' });
+  result$: Observable;
+  status$ = new BehaviorSubject<'RUNNING' | 'STOPPED'>('STOPPED');
   http: HttpSetup;

   constructor(http: HttpSetup) {
     this.http = http;
-    this.input$ = new BehaviorSubject('');
-    this.vega$ = this.input$.pipe(
-      filter((v) => v.length > 0),
-      debounceTime(200),
-      distinctUntilChanged(),
-      tap((v) => llmRunning$.next(true)),
-      // text to ppl
-      switchMap(async (value) => {
-        const pplQuestion = value.split('//')[0];
-        try {
-          const ppl = await this.text2ppl(pplQuestion);
-          return {
-            input: value,
-            ppl,
-          };
-        } catch (e) {
-          return new Error('Cannot generate ppl');
-        }
-      }),
-      // query sample data with ppl
-      switchMap(async (value) => {
-        if (value instanceof Error) {
-          return value;
-        }
-        const ppl = topN(value.ppl, 2);
-        const sample = await this.http.post('/api/ppl/search', {
-          body: JSON.stringify({ query: ppl, format: 'jdbc' }),
-        });
-        return { ...value, sample };
-      }),
-      // call llm to generate vega
-      switchMap(async (value) => {
-        if (value instanceof Error) {
-          return value;
-        }
-        const prompt = createPrompt(value.input, value.ppl, value.sample);
-        const result = await this.text2vega(prompt);
-        delete result.data['values'];
-        result.data.url = {
-          '%type%': 'ppl',
-          query: value.ppl,
-        };
-        return result;
-      }),
-      tap(() => llmRunning$.next(false)),
-      finalize(() => llmRunning$.next(false))
-    );
+    this.result$ = this.input$
+      .pipe(
+        filter((v) => v.input.length > 0),
+        debounceTime(200),
+        tap(() => this.status$.next('RUNNING'))
+      )
+      .pipe(
+        switchMap((v) =>
+          of(v.input).pipe(
+            // text to ppl
+            switchMap(async (value) => {
+              const pplQuestion = value.split('//')[0];
+              const ppl = await this.text2ppl(pplQuestion);
+              return {
+                input: value,
+                ppl,
+              };
+            }),
+            // query sample data with ppl
+            switchMap(async (value) => {
+              const ppl = topN(value.ppl, 2);
+              const sample = await this.http.post('/api/ppl/search', {
+                body: JSON.stringify({ query: ppl, format: 'jdbc' }),
+              });
+              return { ...value, sample };
+            }),
+            // call llm to generate vega
+            switchMap(async (value) => {
+              const prompt = createPrompt(value.input, value.ppl, value.sample);
+              const result = await this.text2vega(prompt);
+              result.data = {
+                url: {
+                  '%type%': 'ppl',
+                  query: value.ppl,
+                },
+              };
+              return result;
+            }),
+            catchError((e) => of({ error: e }))
+          )
+        )
+      )
+      .pipe(tap(() => this.status$.next('STOPPED')));
   }

   async text2vega(query: string) {
-    try {
-      const res = await this.http.post('/api/llm/text2vega', {
-        body: JSON.stringify({ query }),
-      });
-      console.log('llm res: ', res);
-      // return res;
-      const result = res.body.inference_results[0].output[0].dataAsMap;
-      return result;
-    } catch (e) {
-      console.log(e);
-    }
+    const res = await this.http.post('/api/llm/text2vega', {
+      body: JSON.stringify({ query }),
+    });
+    const result = res.body.inference_results[0].output[0].dataAsMap;
+    return result;
   }

   async text2ppl(query: string) {
-    try {
-      const pplResponse = await this.http.post('/api/llm/text2ppl', {
-        body: JSON.stringify({
-          question: query,
-          index: 'opensearch_dashboards_sample_data_logs',
-        }),
-      });
-      // eslint-disable-next-line no-console
-      console.log(pplResponse);
-      const result = JSON.parse(pplResponse.body.inference_results[0].output[0].result);
-      console.log(result);
-      return result.ppl;
-    } catch (e) {
-      console.log(e);
-    }
+    const pplResponse = await this.http.post('/api/llm/text2ppl', {
+      body: JSON.stringify({
+        question: query,
+        index: 'opensearch_dashboards_sample_data_logs',
+      }),
+    });
+    const result = JSON.parse(pplResponse.body.inference_results[0].output[0].result);
+    return result.ppl;
   }

-  updateInput(value: string) {
+  invoke(value: { input: string }) {
     this.input$.next(value);
   }

-  getVega$() {
-    return this.vega$;
+  getStatus$() {
+    return this.status$;
+  }
+
+  getResult$() {
+    return this.result$;
   }
 }
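The operator placement in the constructor above is what keeps result$ alive across failures: catchError sits on the inner of(v.input).pipe(...) created for each emission, so a failed text2ppl or text2vega call is mapped to an { error } value while the outer stream keeps accepting new input. A catchError on the outer pipe would instead complete result$ after the first error. switchMap also discards the result of a previous in-flight chain when new input arrives. A small self-contained sketch of the same catchError placement, with a fake async step standing in for the HTTP calls:

    import { Subject, of } from 'rxjs';
    import { catchError, switchMap } from 'rxjs/operators';

    const input$ = new Subject<string>();
    const result$ = input$.pipe(
      switchMap((input) =>
        // Each input gets its own inner observable, so errors stay local to it.
        of(input).pipe(
          switchMap(async (value) => {
            if (value === 'boom') throw new Error('generation failed'); // stand-in for the LLM calls
            return { spec: `generated for ${value}` };
          }),
          catchError((e) => of({ error: e }))
        )
      )
    );

    result$.subscribe((r) => console.log(r));
    input$.next('boom'); // emits { error: Error } but does not complete result$
    input$.next('ok');   // still emits { spec: 'generated for ok' }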
diff --git a/src/plugins/visualize/public/application/components/visualize_top_nav.tsx b/src/plugins/visualize/public/application/components/visualize_top_nav.tsx
index d01cff288f65..9a0611c6f1a9 100644
--- a/src/plugins/visualize/public/application/components/visualize_top_nav.tsx
+++ b/src/plugins/visualize/public/application/components/visualize_top_nav.tsx
@@ -53,7 +53,6 @@ import { APP_NAME } from '../visualize_constants';
 import { getTopNavConfig } from '../utils';
 import type { IndexPattern } from '../../../../data/public';
 import chatLogo from './query_assistant_logo.svg';
-import { BehaviorSubject } from 'rxjs';

 interface VisualizeTopNavProps {
   currentAppState: VisualizeAppState;
@@ -72,10 +71,6 @@ interface VisualizeTopNavProps {
   onPPL?: (ppl: string) => void;
 }

-const input$ = new BehaviorSubject('');
-// @ts-ignore
-window['input$'] = input$;
-
 const TopNav = ({
   currentAppState,
   isChromeVisible,
@@ -235,9 +230,19 @@ const TopNav = ({
   }, []);

   useEffect(() => {
-    window['llmRunning$'].subscribe((running) => {
-      setGenerating(!!running);
-    });
+    const subscription = (window as any).llm.text2vega
+      .getStatus$()
+      .subscribe((status: 'STOPPED' | 'RUNNING') => {
+        if (status === 'STOPPED') {
+          setGenerating(false);
+        }
+        if (status === 'RUNNING') {
+          setGenerating(true);
+        }
+      });
+    return () => {
+      subscription.unsubscribe();
+    };
   }, []);

   const HARDCODED_SUGGESTIONS: string[] = [
@@ -249,7 +254,7 @@ const TopNav = ({
   const indexName = 'opensearch_dashboards_sample_data_logs';

   const onGenerate = async () => {
-    input$.next(value);
+    (window as any).llm.text2vega.invoke({ input: value });
   };

   return isChromeVisible ? (
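The status handler added above branches twice on the same emission; since status$ only ever carries 'RUNNING' or 'STOPPED', the effect body can collapse to a single boolean mapping. An equivalent sketch, assuming setGenerating is the component state setter used in the hunk above:

    useEffect(() => {
      const subscription = (window as any).llm.text2vega
        .getStatus$()
        .subscribe((status: 'STOPPED' | 'RUNNING') => {
          // True exactly while a generation is in flight.
          setGenerating(status === 'RUNNING');
        });
      return () => subscription.unsubscribe();
    }, []);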