
Commit 4776cd9

[ML] New Platform server shim: update file data visualizer routes to use new platform router (#56972)

* [ML] change import endpoint call to fileupload plugin, update file analyzer endpoint

* [ML] add apiDoc annotation

* [ML] AnalysisResult interface, remove url from apidoc.json

* [ML] delete import_data.js

* [ML] remove caching code, address PR comments

* [ML] file import

* [ML] apidoc

* [ML] schema validation

Co-authored-by: Elastic Machine <[email protected]>
darnautov and elasticmachine authored Feb 7, 2020
1 parent 02f309c commit 4776cd9
Showing 7 changed files with 319 additions and 185 deletions.
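To give the diff below some context: the commit message's "schema validation" step refers to the New Platform router's request validation via @kbn/config-schema. The following is a minimal, hedged sketch of that registration pattern in TypeScript; the endpoint path, payload shapes, and function name are illustrative assumptions, since the actual route files are not shown in this view.

import { schema } from '@kbn/config-schema';
import { IRouter } from 'kibana/server';

export function registerFileDataVisualizerRoutes(router: IRouter) {
  router.post(
    {
      path: '/api/ml/file_data_visualizer/analyze_file', // assumed endpoint path
      validate: {
        // Raw file contents travel in the request body; analysis overrides
        // arrive as string key/value pairs (compare formatOverrides below).
        body: schema.any(),
        query: schema.maybe(schema.recordOf(schema.string(), schema.string())),
      },
    },
    async (context, request, response) => {
      // The real handlers delegate to fileDataVisualizerProvider(context) and
      // importDataProvider(context), both shown later in this diff.
      return response.ok({ body: {} });
    }
  );
}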


file_data_visualizer.ts (new file)
@@ -0,0 +1,103 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

import Boom from 'boom';
import { RequestHandlerContext } from 'kibana/server';

export type InputData = any[];

export interface InputOverrides {
[key: string]: string;
}

export type FormattedOverrides = InputOverrides & {
column_names: string[];
has_header_row: boolean;
should_trim_fields: boolean;
};

export interface AnalysisResult {
results: {
charset: string;
has_header_row: boolean;
has_byte_order_marker: boolean;
format: string;
field_stats: {
[fieldName: string]: {
count: number;
cardinality: number;
top_hits: Array<{ count: number; value: any }>;
};
};
sample_start: string;
num_messages_analyzed: number;
mappings: {
[fieldName: string]: {
type: string;
};
};
quote: string;
delimiter: string;
need_client_timezone: boolean;
num_lines_analyzed: number;
column_names: string[];
};
overrides?: FormattedOverrides;
}

export function fileDataVisualizerProvider(context: RequestHandlerContext) {
async function analyzeFile(data: any, overrides: any): Promise<AnalysisResult> {
let results = [];

try {
results = await context.ml!.mlClient.callAsCurrentUser('ml.fileStructure', {
body: data,
...overrides,
});
} catch (error) {
const err = error.message !== undefined ? error.message : error;
throw Boom.badRequest(err);
}

const { hasOverrides, reducedOverrides } = formatOverrides(overrides);

return {
...(hasOverrides && { overrides: reducedOverrides }),
results,
};
}

return {
analyzeFile,
};
}

function formatOverrides(overrides: InputOverrides) {
let hasOverrides = false;

const reducedOverrides: FormattedOverrides = Object.keys(overrides).reduce((acc, overrideKey) => {
const overrideValue: string = overrides[overrideKey];
if (overrideValue !== '') {
if (overrideKey === 'column_names') {
acc.column_names = overrideValue.split(',');
} else if (overrideKey === 'has_header_row') {
acc.has_header_row = overrideValue === 'true';
} else if (overrideKey === 'should_trim_fields') {
acc.should_trim_fields = overrideValue === 'true';
} else {
acc[overrideKey] = overrideValue;
}

hasOverrides = true;
}
return acc;
}, {} as FormattedOverrides);

return {
reducedOverrides,
hasOverrides,
};
}
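For orientation, here is a hedged sketch of how the provider above can be consumed; the wrapper function is hypothetical, and only fileDataVisualizerProvider and its exported types come from this commit.

import { RequestHandlerContext } from 'kibana/server';
import {
  fileDataVisualizerProvider,
  InputOverrides,
  AnalysisResult,
} from './file_data_visualizer';

async function analyzeUploadedFile(
  context: RequestHandlerContext,
  data: any,
  overrides: InputOverrides
): Promise<AnalysisResult> {
  const { analyzeFile } = fileDataVisualizerProvider(context);
  // Empty-string overrides are dropped, and column_names plus the boolean
  // flags are coerced by formatOverrides before being echoed back.
  return analyzeFile(data, overrides);
}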
import_data.ts
@@ -4,10 +4,43 @@
* you may not use this file except in compliance with the Elastic License.
*/

import { RequestHandlerContext } from 'kibana/server';
import { INDEX_META_DATA_CREATED_BY } from '../../../common/constants/file_datavisualizer';
import { InputData } from './file_data_visualizer';

export function importDataProvider(callWithRequest) {
async function importData(id, index, settings, mappings, ingestPipeline, data) {
export interface Settings {
pipeline?: string;
index: string;
body: any[];
[key: string]: any;
}

export interface Mappings {
[key: string]: any;
}

export interface InjectPipeline {
id: string;
pipeline: any;
}

interface Failure {
item: number;
reason: string;
doc: any;
}

export function importDataProvider(context: RequestHandlerContext) {
const callAsCurrentUser = context.ml!.mlClient.callAsCurrentUser;

async function importData(
id: string,
index: string,
settings: Settings,
mappings: Mappings,
ingestPipeline: InjectPipeline,
data: InputData
) {
let createdIndex;
let createdPipelineId;
const docCount = data.length;
@@ -35,7 +68,7 @@ export function importDataProvider(callWithRequest) {
createdPipelineId = pipelineId;
}

let failures = [];
let failures: Failure[] = [];
if (data.length) {
const resp = await indexData(index, createdPipelineId, data);
if (resp.success === false) {
@@ -72,8 +105,8 @@ export function importDataProvider(callWithRequest) {
}
}

async function createIndex(index, settings, mappings) {
const body = {
async function createIndex(index: string, settings: Settings, mappings: Mappings) {
const body: { mappings: Mappings; settings?: Settings } = {
mappings: {
_meta: {
created_by: INDEX_META_DATA_CREATED_BY,
@@ -86,23 +119,23 @@ export function importDataProvider(callWithRequest) {
body.settings = settings;
}

await callWithRequest('indices.create', { index, body });
await callAsCurrentUser('indices.create', { index, body });
}

async function indexData(index, pipelineId, data) {
async function indexData(index: string, pipelineId: string, data: InputData) {
try {
const body = [];
for (let i = 0; i < data.length; i++) {
body.push({ index: {} });
body.push(data[i]);
}

const settings = { index, body };
const settings: Settings = { index, body };
if (pipelineId !== undefined) {
settings.pipeline = pipelineId;
}

const resp = await callWithRequest('bulk', settings);
const resp = await callAsCurrentUser('bulk', settings);
if (resp.errors) {
throw resp;
} else {
@@ -113,7 +146,7 @@ export function importDataProvider(callWithRequest) {
};
}
} catch (error) {
let failures = [];
let failures: Failure[] = [];
let ingestError = false;
if (error.errors !== undefined && Array.isArray(error.items)) {
// an expected error where some or all of the bulk request
@@ -134,11 +167,11 @@ export function importDataProvider(callWithRequest) {
}
}

async function createPipeline(id, pipeline) {
return await callWithRequest('ingest.putPipeline', { id, body: pipeline });
async function createPipeline(id: string, pipeline: any) {
return await callAsCurrentUser('ingest.putPipeline', { id, body: pipeline });
}

function getFailures(items, data) {
function getFailures(items: any[], data: InputData): Failure[] {
const failures = [];
for (let i = 0; i < items.length; i++) {
const item = items[i];
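A hedged usage sketch for the import provider, assuming it exposes an importData function in the same provider style as fileDataVisualizerProvider (the return statement falls outside the visible hunks). The index name, mappings, and pipeline are made-up placeholders.

import { RequestHandlerContext } from 'kibana/server';
import { importDataProvider, Settings, Mappings, InjectPipeline } from './import_data';

async function importSampleDocs(context: RequestHandlerContext) {
  const { importData } = importDataProvider(context);

  const index = 'my-imported-data'; // placeholder index name
  // Settings also describes the bulk-request options in this module (index and
  // body are required there), so a cast keeps this sketch minimal.
  const settings = { number_of_shards: 1 } as Settings;
  const mappings: Mappings = { properties: { message: { type: 'text' } } };
  const ingestPipeline: InjectPipeline = {
    id: 'my-imported-data-pipeline', // placeholder pipeline id
    pipeline: { description: '', processors: [] },
  };
  const data = [{ message: 'first doc' }, { message: 'second doc' }];

  // '' stands in for the import id; its exact semantics are outside the visible hunks.
  // Creates the index (and ingest pipeline), then bulk-indexes the documents.
  return importData('', index, settings, mappings, ingestPipeline, data);
}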
index.ts
@@ -4,5 +4,11 @@
* you may not use this file except in compliance with the Elastic License.
*/

export { fileDataVisualizerProvider } from './file_data_visualizer';
export { importDataProvider } from './import_data';
export {
fileDataVisualizerProvider,
InputOverrides,
InputData,
AnalysisResult,
} from './file_data_visualizer';

export { importDataProvider, Settings, InjectPipeline, Mappings } from './import_data';
x-pack/legacy/plugins/ml/server/routes/apidoc.json (4 changes: 3 additions & 1 deletion)
@@ -3,7 +3,6 @@
"version": "0.1.0",
"description": "ML Kibana API",
"title": "ML Kibana API",
"url" : "/api/ml/",
"order": [
"DataFrameAnalytics",
"GetDataFrameAnalytics",
@@ -34,6 +33,9 @@
"ForecastAnomalyDetector",
"GetOverallBuckets",
"GetCategories",
"FileDataVisualizer",
"AnalyzeFile",
"ImportFile"
"ResultsService",
"GetAnomaliesTableData",
"GetCategoryDefinition",
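The three names added above (FileDataVisualizer, AnalyzeFile, ImportFile) are apiDoc group and endpoint identifiers, matching the commit message's "apidoc" step. For context, here is a hedged sketch of the annotation style they refer to, as it would sit on a route registration; the path and description wording are assumptions.

/**
 * @apiGroup FileDataVisualizer
 *
 * @api {post} /api/ml/file_data_visualizer/analyze_file Analyze file
 * @apiName AnalyzeFile
 * @apiDescription Analyzes a sample of file contents and returns its structure (assumed wording).
 */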