Query api #289

Closed · wants to merge 16 commits

Changes from 13 commits
14 changes: 7 additions & 7 deletions src/@types/parseable/api/query.ts
@@ -2,21 +2,21 @@ export type LogsQuery = {
streamName: string;
startTime: Date;
endTime: Date;
access:string[]|null;
access: string[] | null;
};

export enum SortOrder {
ASCENDING = 1,
DESCENDING = -1
DESCENDING = -1,
}

export type LogsSearch = {
search: string;
filters: Record<string, string[]>;
sort: {
key: string,
order: SortOrder
}
key: string;
order: SortOrder;
};
};

export type LogsData = {
@@ -35,8 +35,8 @@ export type Log = {
};

export type LogSelectedTimeRange = {
state : "fixed"| "custom";
value : string;
state: 'fixed' | 'custom';
value: string;
};

export type UserRoles = {
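For reference, a value that satisfies the reformatted LogsSearch type could look like the sketch below; the search text, filter keys, and sort key are illustrative and not taken from this PR.

import { LogsSearch, SortOrder } from '@/@types/parseable/api/query';

// Illustrative only: a LogsSearch value using the reformatted type.
const exampleSearch: LogsSearch = {
	search: 'error',
	filters: { level: ['error', 'warn'] },
	sort: { key: 'timestamp', order: SortOrder.DESCENDING },
};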
32 changes: 20 additions & 12 deletions src/@types/parseable/api/stream.ts
@@ -1,4 +1,5 @@
import { Field } from '../dataType';
import { Log } from './query';

export type LogStreamData = Array<{ name: string }>;

@@ -7,6 +8,11 @@ export type LogStreamSchemaData = {
metadata: Record<string, string>;
};

export type LogStreamQueryWithFields = {
Contributor comment: This is not a query. Rename the type.

fields: string[];
records: Log[];
};

export type LogStreamStat = {
ingestion: {
count: number;
@@ -34,24 +40,26 @@ export type action = {
};

export type StreamInfo = {
"created-at": string;
"first-event-at": string;
'created-at': string;
'first-event-at': string;
cache_enabled: boolean;
time_partition: string;
static_schema_flag: boolean;
time_partition_limit: string,
custom_partition: string,
}
time_partition_limit: string;
custom_partition: string;
};

export type LogStreamRetention = Array<action>;

export type HotTierConfig = {
size: string;
used_size: string;
available_size: string;
oldest_date_time_entry: string;
} | {};
export type HotTierConfig =
| {
size: string;
used_size: string;
available_size: string;
oldest_date_time_entry: string;
}
| {};

export type UpdateHotTierConfig = {
size: string;
}
};
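Since HotTierConfig is now an explicit union with an empty-object member, callers have to narrow it before reading any field. A minimal sketch of such a guard follows; isHotTierSet and describeHotTier are hypothetical helpers, not part of this PR.

import { HotTierConfig } from '@/@types/parseable/api/stream';

// The populated member of the HotTierConfig union.
type PopulatedHotTier = {
	size: string;
	used_size: string;
	available_size: string;
	oldest_date_time_entry: string;
};

// Narrow the union: the server may return {} when no hot tier is configured.
const isHotTierSet = (config: HotTierConfig): config is PopulatedHotTier => 'size' in config;

// Usage: only read the size fields after narrowing.
const describeHotTier = (config: HotTierConfig) =>
	isHotTierSet(config) ? `${config.used_size} of ${config.size} used` : 'hot tier not configured';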
4 changes: 2 additions & 2 deletions src/api/cluster.ts
@@ -10,7 +10,7 @@ export const getIngestorInfo = (domain_name: string | null, startTime: Date, end
const query = `SELECT * FROM pmeta where address = '${domain_name}' ORDER BY event_time DESC LIMIT 10 OFFSET 0`;

return Axios().post<IngestorQueryRecord[]>(
LOG_QUERY_URL,
LOG_QUERY_URL(),
{
query,
startTime,
@@ -26,4 +26,4 @@ export const getClusterMetrics = () => {

export const deleteIngestor = (ingestorUrl: string) => {
return Axios().delete(INGESTOR_DELETE_URL(ingestorUrl));
};
};
5 changes: 4 additions & 1 deletion src/api/constants.ts
@@ -1,9 +1,12 @@
import { paramsParser } from '@/utils/URLParamsParser';

const API_V1 = 'api/v1';

// Streams Management
export const LOG_STREAM_LIST_URL = `${API_V1}/logstream`;
export const LOG_STREAMS_SCHEMA_URL = (streamName: string) => `${LOG_STREAM_LIST_URL}/${streamName}/schema`;
export const LOG_QUERY_URL = `${API_V1}/query`;
export const LOG_QUERY_URL = (params?: Record<string, string>) =>
params ? `${API_V1}/query?${paramsParser(params)}` : `${API_V1}/query`;
export const LOG_STREAMS_ALERTS_URL = (streamName: string) => `${LOG_STREAM_LIST_URL}/${streamName}/alert`;
export const LIST_SAVED_FILTERS_URL = (userId: string) => `${API_V1}/filters/${userId}`;
export const UPDATE_SAVED_FILTERS_URL = (filterId: string) => `${API_V1}/filters/filter/${filterId}`;
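The LOG_QUERY_URL helper now optionally appends a query string via paramsParser, which is imported from '@/utils/URLParamsParser' but not shown in this diff. A plausible sketch, assuming paramsParser simply serializes a flat key/value record:

// Hypothetical sketch of paramsParser; the real implementation is in
// '@/utils/URLParamsParser' and is not part of this diff.
const paramsParser = (params: Record<string, string>): string => new URLSearchParams(params).toString();

const API_V1 = 'api/v1';
const LOG_QUERY_URL = (params?: Record<string, string>) =>
	params ? `${API_V1}/query?${paramsParser(params)}` : `${API_V1}/query`;

// LOG_QUERY_URL()                    -> 'api/v1/query'
// LOG_QUERY_URL({ fields: 'true' })  -> 'api/v1/query?fields=true'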
12 changes: 6 additions & 6 deletions src/api/query.ts
@@ -11,19 +11,19 @@ type QueryLogs = {
pageOffset: number;
};

type QueryParams = Record<string, string>;

// to optimize performance, it has been decided to round off the time at the given level
// so making the end-time inclusive
const optimizeEndTime = (endTime: Date) => {
return dayjs(endTime).add(1, 'minute').toDate();
};

export const getQueryLogs = (logsQuery: QueryLogs) => {
export const getQueryLogs = (logsQuery: QueryLogs, queryParams?: QueryParams) => {
Contributor comment: this should always receive an object. Empty {} if no params.

const { startTime, endTime, streamName, limit, pageOffset } = logsQuery;

const query = `SELECT * FROM ${streamName} LIMIT ${limit} OFFSET ${pageOffset}`;

return Axios().post(
LOG_QUERY_URL,
LOG_QUERY_URL(queryParams && queryParams),
{
query,
startTime,
@@ -33,11 +33,11 @@ export const getQueryLogs = () => {
);
};

export const getQueryResult = (logsQuery: LogsQuery, query = '') => {
export const getQueryResult = (logsQuery: LogsQuery, query = '', queryParams?: QueryParams) => {
Contributor comment: this should always receive an object. Empty {} if no params.

const { startTime, endTime } = logsQuery;

return Axios().post(
LOG_QUERY_URL,
LOG_QUERY_URL(queryParams && queryParams),
{
query,
startTime,
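A minimal sketch of the reviewer's suggestion for getQueryResult (and, analogously, getQueryLogs): default queryParams to an empty object so callers never pass undefined, and only forward it to LOG_QUERY_URL when it actually has entries. This assumes the file's existing imports (Axios, LOG_QUERY_URL, LogsQuery, QueryParams) and is not the code merged in this PR.

export const getQueryResult = (logsQuery: LogsQuery, query = '', queryParams: QueryParams = {}) => {
	const { startTime, endTime } = logsQuery;
	// Forward params only when the caller provided any; LOG_QUERY_URL falls
	// back to the bare endpoint otherwise, avoiding a trailing '?'.
	const params = Object.keys(queryParams).length ? queryParams : undefined;

	return Axios().post(LOG_QUERY_URL(params), { query, startTime, endTime });
};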
4 changes: 0 additions & 4 deletions src/hooks/useGetLogStreamSchema.ts
@@ -4,20 +4,17 @@ import { StatusCodes } from 'http-status-codes';
import useMountedState from './useMountedState';
import { Field } from '@/@types/parseable/dataType';
import { useAppStore } from '@/layouts/MainLayout/providers/AppProvider';
import { useLogsStore, logsStoreReducers } from '@/pages/Stream/providers/LogsProvider';
import { useStreamStore, streamStoreReducers } from '@/pages/Stream/providers/StreamProvider';
import { AxiosError } from 'axios';
import _ from 'lodash';

const { setStreamSchema } = streamStoreReducers;
const { setTableHeaders } = logsStoreReducers;

export const useGetLogStreamSchema = () => {
const [data, setData] = useMountedState<LogStreamSchemaData | null>(null);
const [error, setError] = useMountedState<string | null>(null);
const [loading, setLoading] = useMountedState<boolean>(false);
const [currentStream] = useAppStore((store) => store.currentStream);
const [, setLogsStore] = useLogsStore((_store) => null);
const [, setStreamStore] = useStreamStore((_store) => null);

const getDataSchema = async (stream: string | null = currentStream) => {
@@ -34,7 +31,6 @@ export const useGetLogStreamSchema = () => {

setData(schema);
setStreamStore((store) => setStreamSchema(store, schema));
setLogsStore((store) => setTableHeaders(store, schema));
break;
}
default: {
18 changes: 10 additions & 8 deletions src/hooks/useQueryLogs.ts
@@ -7,7 +7,6 @@ import { useLogsStore, logsStoreReducers, LOAD_LIMIT, isJqSearch } from '@/pages
import { useAppStore } from '@/layouts/MainLayout/providers/AppProvider';
import { useQueryResult } from './useQueryResult';
import _ from 'lodash';
import { useStreamStore } from '@/pages/Stream/providers/StreamProvider';
import { AxiosError } from 'axios';
import jqSearch from '@/utils/jqSearch';

@@ -43,7 +42,6 @@ export const useQueryLogs = () => {
},
});
const [currentStream] = useAppStore((store) => store.currentStream);
const [schema] = useStreamStore((store) => store.schema);
const [
{
timeRange,
@@ -88,24 +86,28 @@
setError(null);

const logsQueryRes = isQuerySearchActive
? await getQueryResult({ ...logsQuery, access: [] }, appendOffsetToQuery(custSearchQuery, logsQuery.pageOffset))
: await getQueryLogs(logsQuery);
? await getQueryResult(
{ ...logsQuery, access: [] },
appendOffsetToQuery(custSearchQuery, logsQuery.pageOffset),
{ fields: 'true' },
)
: await getQueryLogs(logsQuery, { fields: 'true' });

const data = logsQueryRes.data;
const data = logsQueryRes.data.records;
Contributor comment: There is a possibility that the server could send you {} or null. Verify you always get a result that contains a records array even if there is no data.


if (logsQueryRes.status === StatusCodes.OK) {
const jqFilteredData = isJqSearch(instantSearchValue) ? await jqSearch(data, instantSearchValue) : [];
return setLogsStore((store) => setData(store, data, schema, jqFilteredData));
return setLogsStore((store) => setData(store, data, logsQueryRes.data, jqFilteredData));
Contributor comment: why do both data and logsQueryRes.data exist here?

}
if (typeof data === 'string' && data.includes('Stream is not initialized yet')) {
return setLogsStore((store) => setData(store, [], schema));
return setLogsStore((store) => setData(store, [], logsQueryRes.data));
Contributor comment: Why are you trying to set the data when the stream is not initialized? I don't think this logsQueryRes.data will hold what you expect here.

}
setError('Failed to query log');
} catch (e) {
const axiosError = e as AxiosError;
const errorMessage = axiosError?.response?.data;
setError(_.isString(errorMessage) && !_.isEmpty(errorMessage) ? errorMessage : 'Failed to query log');
return setLogsStore((store) => setData(store, [], schema));
return setLogsStore((store) => setData(store, [], null));
Contributor comment: Explain why the third argument is null.

} finally {
setLoading(false);
}
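One way to address the reviewer's concern that the server could return {} or null instead of { fields, records } is a small normalizer in front of setData. The helper below is a hypothetical sketch, not part of this PR; the import path follows the alias used elsewhere in this repository.

import { LogStreamQueryWithFields } from '@/@types/parseable/api/stream';

// Hypothetical helper: normalize a query response that may be {}, null, or
// { fields, records } into a shape the store reducers can rely on.
const normalizeQueryResponse = (
	response: Partial<LogStreamQueryWithFields> | null | undefined,
): LogStreamQueryWithFields => {
	const fields = response && Array.isArray(response.fields) ? response.fields : [];
	const records = response && Array.isArray(response.records) ? response.records : [];
	return { fields, records };
};

// Usage sketch inside getQueryData:
//   const { records, fields } = normalizeQueryResponse(logsQueryRes.data);
//   setLogsStore((store) => setData(store, records, { fields, records }, jqFilteredData));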
7 changes: 5 additions & 2 deletions src/pages/Stream/Views/Explore/useLogsFetcher.ts
@@ -2,25 +2,28 @@ import { useAppStore } from '@/layouts/MainLayout/providers/AppProvider';
import { useEffect } from 'react';
import { useLogsStore, logsStoreReducers } from '../../providers/LogsProvider';
import { useQueryLogs } from '@/hooks/useQueryLogs';
import { useGetLogStreamSchema } from '@/hooks/useGetLogStreamSchema';

const { setCleanStoreForStreamChange } = logsStoreReducers;

const useLogsFetcher = (props: {schemaLoading: boolean}) => {
const {schemaLoading} = props;
const useLogsFetcher = (props: { schemaLoading: boolean }) => {
const { schemaLoading } = props;
const [currentStream] = useAppStore((store) => store.currentStream);
const [tableOpts, setLogsStore] = useLogsStore((store) => store.tableOpts);
const { currentOffset, currentPage, pageData } = tableOpts;
const { getQueryData, loading: logsLoading, error: errorMessage, fetchCount, isFetchingCount } = useQueryLogs();
const hasContentLoaded = schemaLoading === false && logsLoading === false;
const hasNoData = hasContentLoaded && !errorMessage && pageData.length === 0;
const showTable = hasContentLoaded && !hasNoData && !errorMessage;
const { getDataSchema } = useGetLogStreamSchema();

useEffect(() => {
setLogsStore(setCleanStoreForStreamChange);
}, [currentStream]);

useEffect(() => {
if (currentPage === 0 && currentOffset === 0) {
getDataSchema();
getQueryData();
fetchCount();
}
35 changes: 24 additions & 11 deletions src/pages/Stream/providers/LogsProvider.tsx
@@ -1,10 +1,10 @@
import { Log } from '@/@types/parseable/api/query';
import { LogStreamData, LogStreamSchemaData } from '@/@types/parseable/api/stream';
import { LogStreamData, LogStreamQueryWithFields, LogStreamSchemaData } from '@/@types/parseable/api/stream';
import { FIXED_DURATIONS, FixedDuration } from '@/constants/timeConstants';
import initContext from '@/utils/initContext';
import dayjs, { Dayjs } from 'dayjs';
import { addOrRemoveElement } from '@/utils';
import { getPageSlice, makeHeadersFromSchema, makeHeadersfromData } from '../utils';
import { getPageSlice, makeHeadersFromQueryFields, makeHeadersfromData } from '../utils';
import _ from 'lodash';
import { sanitizeCSVData } from '@/utils/exportHelpers';

@@ -260,7 +260,12 @@ type LogsStoreReducers = {
getCleanStoreForRefetch: (store: LogsStore) => ReducerOutput;

// data reducers
setData: (store: LogsStore, data: Log[], schema: LogStreamSchemaData | null, jqFilteredData?: Log[]) => ReducerOutput;
setData: (
store: LogsStore,
data: Log[],
queryResponse: LogStreamQueryWithFields | null,
Contributor comment (@balaji-jr, Aug 12, 2024), on the data and queryResponse parameters: with the logs provider context, both deal with almost the same values. Remove the less significant one.

jqFilteredData?: Log[],
) => ReducerOutput;
setStreamSchema: (store: LogsStore, schema: LogStreamSchemaData) => ReducerOutput;
applyCustomQuery: (
store: LogsStore,
@@ -272,7 +277,7 @@ type LogsStoreReducers = {
getUniqueValues: (data: Log[], key: string) => string[];
makeExportData: (data: Log[], headers: string[], type: string) => Log[];
setRetention: (store: LogsStore, retention: { description: string; duration: string }) => ReducerOutput;
setTableHeaders: (store: LogsStore, schema: LogStreamSchemaData) => ReducerOutput;
setTableHeaders: (store: LogsStore, queryResponse: LogStreamQueryWithFields) => ReducerOutput;
Contributor comment: for setting table headers, why'd you need the entire LogStreamQueryWithFields?


setCleanStoreForStreamChange: (store: LogsStore) => ReducerOutput;
updateSavedFilterId: (store: LogsStore, savedFilterId: string | null) => ReducerOutput;
@@ -359,7 +364,7 @@ const setTimeRange = (
return {
...cleanStore,
timeRange: { ...store.timeRange, startTime: startTime.toDate(), endTime: endTime.toDate(), label, interval, type },
viewMode: store.viewMode
viewMode: store.viewMode,
};
};

@@ -527,13 +532,13 @@ const searchAndSortData = (opts: { searchValue: string }, data: Log[]) => {
return sortedData;
};

const setTableHeaders = (store: LogsStore, schema: LogStreamSchemaData) => {
const setTableHeaders = (store: LogsStore, queryResponse: LogStreamQueryWithFields) => {
const { data: existingData, custQuerySearchState, tableOpts } = store;
const { filteredData } = existingData;
const newHeaders =
filteredData && custQuerySearchState.isQuerySearchActive
? makeHeadersfromData(filteredData)
: makeHeadersFromSchema(schema);
: makeHeadersFromQueryFields(queryResponse);
Contributor comment: It's straightforward. You do not need a function here.

return {
tableOpts: {
...tableOpts,
@@ -546,7 +551,12 @@ export const isJqSearch = (value: string) => {
return _.startsWith(value, 'jq .');
};

const setData = (store: LogsStore, data: Log[], schema: LogStreamSchemaData | null, jqFilteredData?: Log[]) => {
const setData = (
store: LogsStore,
data: Log[],
queryResponse: LogStreamQueryWithFields | null,
jqFilteredData?: Log[],
) => {
const {
data: existingData,
tableOpts,
@@ -563,7 +573,7 @@ const setData = (store: LogsStore, data: Log[], schema: LogStreamSchemaData | nu
: filterAndSortData(tableOpts, data);
const newPageSlice = filteredData && getPageSlice(currentPage, tableOpts.perPage, filteredData);
const newHeaders =
isQuerySearchActive && activeMode === 'sql' ? makeHeadersfromData(data) : makeHeadersFromSchema(schema);
isQuerySearchActive && activeMode === 'sql' ? makeHeadersfromData(data) : makeHeadersFromQueryFields(queryResponse);
Contributor comment: why'd you want to make headers from data when isQuerySearchActive && activeMode === 'sql'?


return {
tableOpts: {
@@ -871,7 +881,10 @@ const toggleSideBar = (store: LogsStore) => {

const onToggleView = (store: LogsStore, viewMode: 'json' | 'table') => {
const { data, tableOpts } = store;
const filteredData = filterAndSortData({ sortOrder: defaultSortOrder, sortKey: defaultSortKey, filters: {} }, data.rawData);
const filteredData = filterAndSortData(
{ sortOrder: defaultSortOrder, sortKey: defaultSortKey, filters: {} },
data.rawData,
);
const currentPage = tableOpts.currentPage;
const newPageSlice = getPageSlice(currentPage, tableOpts.perPage, filteredData);

@@ -888,7 +901,7 @@ const onToggleView = (store: LogsStore, viewMode: 'json' | 'table') => {
currentPage,
totalPages: getTotalPages(filteredData, tableOpts.perPage),
},
viewMode
viewMode,
};
};

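makeHeadersFromQueryFields is imported from '../utils' at the top of this file but its body is not part of the diff. Based on the reviewer's remark that it is straightforward, a plausible sketch is the one-liner below; this is an assumption about its behavior, not the actual implementation.

import { LogStreamQueryWithFields } from '@/@types/parseable/api/stream';

// Assumed behavior: table headers are simply the field names returned by the
// query API, or an empty list when the response carries none.
const makeHeadersFromQueryFields = (queryResponse: LogStreamQueryWithFields | null): string[] =>
	queryResponse?.fields ?? [];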