Skip to content

Commit

Permalink
refactor to expose text2vega object to window object as a temporary solution
Browse files Browse the repository at this point in the history

Signed-off-by: Yulong Ruan <[email protected]>
  • Loading branch information
ruanyl committed Jun 4, 2024
1 parent bb8a475 commit 0edeade
Show file tree
Hide file tree
Showing 4 changed files with 95 additions and 111 deletions.
17 changes: 7 additions & 10 deletions src/plugins/vis_type_vega/public/components/vega_vis_editor.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -95,19 +95,16 @@ function VegaVisEditor({ stateParams, setValue }: VisOptionsProps<VisParams>) {

useEffect(() => {
const text2vega = getText2Vega();
text2vega.getVega$().subscribe((v) => {
if (!(v instanceof Error)) {
setValue('spec', JSON.stringify(v, null, 4));
const subscription = text2vega.getResult$().subscribe((result) => {
if (result) {
setValue('spec', JSON.stringify(result, null, 4));
}
});

if (window['input$']) {
window['input$'].subscribe((v: string) => {
text2vega.updateInput(v);
});
}
// text2vega.updateInput('find unique visitors and average bytes every 3 hours');
}, []);
return () => {
subscription.unsubscribe();
};
}, [setValue]);

return (
<div className="vgaEditor">
Expand Down
7 changes: 6 additions & 1 deletion src/plugins/vis_type_vega/public/plugin.ts
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,11 @@ export class VegaPlugin implements Plugin<Promise<void>, void> {
setSavedObjectsClient(core.savedObjects);
setUiActions(uiActions);
setInjectedMetadata(core.injectedMetadata);
setText2Vega(new Text2Vega(core.http));

const text2vega = new Text2Vega(core.http);
setText2Vega(text2vega);
// quick hack to share llm execution context among plugins
// TODO: refactor this to not use window to share context
(window as any).llm = { text2vega };
}
}
159 changes: 68 additions & 91 deletions src/plugins/vis_type_vega/public/text_to_vega.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,5 @@
import { BehaviorSubject, Observable } from 'rxjs';
import {
debounceTime,
distinctUntilChanged,
switchMap,
tap,
filter,
finalize,
} from 'rxjs/operators';
import { BehaviorSubject, Observable, of } from 'rxjs';
import { debounceTime, switchMap, tap, filter, catchError } from 'rxjs/operators';
import { HttpSetup } from 'opensearch-dashboards/public';

// Limit a PPL query to its first `limit` rows by appending a `head` clause.
const topN = (pplQuery: string, limit: number): string => [pplQuery, `head ${limit}`].join(' | ');
Expand Down Expand Up @@ -35,103 +28,87 @@ Just reply with the json based Vega-Lite object, do not include any other conten
`;
};

const llmRunning$ = new BehaviorSubject(false);
// @ts-ignore
window['llmRunning$'] = llmRunning$;

export class Text2Vega {
input$: BehaviorSubject<string>;
vega$: Observable<string | Error>;
input$ = new BehaviorSubject({ input: '' });
result$: Observable<string | Error>;
status$ = new BehaviorSubject<'RUNNING' | 'STOPPED'>('STOPPED');
http: HttpSetup;

constructor(http: HttpSetup) {
this.http = http;
this.input$ = new BehaviorSubject('');
this.vega$ = this.input$.pipe(
filter((v) => v.length > 0),
debounceTime(200),
distinctUntilChanged(),
tap((v) => llmRunning$.next(true)),
// text to ppl
switchMap(async (value) => {
const pplQuestion = value.split('//')[0];
try {
const ppl = await this.text2ppl(pplQuestion);
return {
input: value,
ppl,
};
} catch (e) {
return new Error('Cannot generate ppl');
}
}),
// query sample data with ppl
switchMap(async (value) => {
if (value instanceof Error) {
return value;
}
const ppl = topN(value.ppl, 2);
const sample = await this.http.post('/api/ppl/search', {
body: JSON.stringify({ query: ppl, format: 'jdbc' }),
});
return { ...value, sample };
}),
// call llm to generate vega
switchMap(async (value) => {
if (value instanceof Error) {
return value;
}
const prompt = createPrompt(value.input, value.ppl, value.sample);
const result = await this.text2vega(prompt);
delete result.data['values'];
result.data.url = {
'%type%': 'ppl',
query: value.ppl,
};
return result;
}),
tap(() => llmRunning$.next(false)),
finalize(() => llmRunning$.next(false))
);
this.result$ = this.input$
.pipe(
filter((v) => v.input.length > 0),
debounceTime(200),
tap(() => this.status$.next('RUNNING'))
)
.pipe(
switchMap((v) =>
of(v.input).pipe(
// text to ppl
switchMap(async (value) => {
const pplQuestion = value.split('//')[0];
const ppl = await this.text2ppl(pplQuestion);
return {
input: value,
ppl,
};
}),
// query sample data with ppl
switchMap(async (value) => {
const ppl = topN(value.ppl, 2);
const sample = await this.http.post('/api/ppl/search', {
body: JSON.stringify({ query: ppl, format: 'jdbc' }),
});
return { ...value, sample };
}),
// call llm to generate vega
switchMap(async (value) => {
const prompt = createPrompt(value.input, value.ppl, value.sample);
const result = await this.text2vega(prompt);
result.data = {
url: {
'%type%': 'ppl',
query: value.ppl,
},
};
return result;
}),
catchError((e) => of({ error: e }))
)
)
)
.pipe(tap(() => this.status$.next('STOPPED')));
}

async text2vega(query: string) {
try {
const res = await this.http.post('/api/llm/text2vega', {
body: JSON.stringify({ query }),
});
console.log('llm res: ', res);
// return res;
const result = res.body.inference_results[0].output[0].dataAsMap;
return result;
} catch (e) {
console.log(e);
}
const res = await this.http.post('/api/llm/text2vega', {
body: JSON.stringify({ query }),
});
const result = res.body.inference_results[0].output[0].dataAsMap;
return result;
}

async text2ppl(query: string) {
try {
const pplResponse = await this.http.post('/api/llm/text2ppl', {
body: JSON.stringify({
question: query,
index: 'opensearch_dashboards_sample_data_logs',
}),
});
// eslint-disable-next-line no-console
console.log(pplResponse);
const result = JSON.parse(pplResponse.body.inference_results[0].output[0].result);
console.log(result);
return result.ppl;
} catch (e) {
console.log(e);
}
const pplResponse = await this.http.post('/api/llm/text2ppl', {
body: JSON.stringify({
question: query,
index: 'opensearch_dashboards_sample_data_logs',
}),
});
const result = JSON.parse(pplResponse.body.inference_results[0].output[0].result);
return result.ppl;
}

updateInput(value: string) {
invoke(value: { input: string }) {
this.input$.next(value);
}

getVega$() {
return this.vega$;
getStatus$() {
return this.status$;
}

getResult$() {
return this.result$;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,6 @@ import { APP_NAME } from '../visualize_constants';
import { getTopNavConfig } from '../utils';
import type { IndexPattern } from '../../../../data/public';
import chatLogo from './query_assistant_logo.svg';
import { BehaviorSubject } from 'rxjs';

interface VisualizeTopNavProps {
currentAppState: VisualizeAppState;
Expand All @@ -72,10 +71,6 @@ interface VisualizeTopNavProps {
onPPL?: (ppl: string) => void;
}

const input$ = new BehaviorSubject('');
// @ts-ignore
window['input$'] = input$;

const TopNav = ({
currentAppState,
isChromeVisible,
Expand Down Expand Up @@ -235,9 +230,19 @@ const TopNav = ({
}, []);

useEffect(() => {
window['llmRunning$'].subscribe((running) => {
setGenerating(!!running);
});
const subscription = (window as any).llm.text2vega
.getStatus$()
.subscribe((status: 'STOPPED' | 'RUNNING') => {
if (status === 'STOPPED') {
setGenerating(false);
}
if (status === 'RUNNING') {
setGenerating(true);
}
});
return () => {
subscription.unsubscribe();
};
}, []);

const HARDCODED_SUGGESTIONS: string[] = [
Expand All @@ -249,7 +254,7 @@ const TopNav = ({

const indexName = 'opensearch_dashboards_sample_data_logs';
const onGenerate = async () => {
input$.next(value);
(window as any).llm.text2vega.invoke({ input: value });
};

return isChromeVisible ? (
Expand Down

0 comments on commit 0edeade

Please sign in to comment.