Skip to content

Commit

Permalink
Merge branch 'master' of https://github.com/elastic/kibana into np_re…
Browse files Browse the repository at this point in the history
…ady_2
  • Loading branch information
igoristic committed Feb 7, 2020
2 parents 3089ed1 + d72a715 commit 27c3b05
Show file tree
Hide file tree
Showing 14 changed files with 386 additions and 212 deletions.
2 changes: 1 addition & 1 deletion src/optimize/base_optimizer.js
Original file line number Diff line number Diff line change
Expand Up @@ -459,7 +459,7 @@ export default class BaseOptimizer {
optimization: {
minimizer: [
new TerserPlugin({
parallel: this.getThreadLoaderPoolConfig().workers,
parallel: false,
sourceMap: false,
cache: false,
extractComments: false,
Expand Down
16 changes: 10 additions & 6 deletions src/optimize/dynamic_dll_plugin/dll_config_model.js
Original file line number Diff line number Diff line change
Expand Up @@ -214,16 +214,20 @@ function common(config) {
return webpackMerge(generateDLL(config));
}

function optimized(config) {
function optimized() {
return webpackMerge({
mode: 'production',
optimization: {
minimizer: [
new TerserPlugin({
// Apply the same logic used to calculate the
// threadLoaderPool workers number to spawn
// the parallel processes on terser
parallel: config.threadLoaderPoolConfig.workers,
// NOTE: we should not enable that option for now
// Since 2.0.0 terser-webpack-plugin is using jest-worker
// to run tasks in a pool of workers. Currently it looks like
// is requiring too much memory and break on large entry points
// compilations (like this) one. Also the gain we have enabling
// that option was barely noticed.
// https://github.com/webpack-contrib/terser-webpack-plugin/issues/143
parallel: false,
sourceMap: false,
cache: false,
extractComments: false,
Expand All @@ -250,5 +254,5 @@ export function configModel(rawConfig = {}) {
return webpackMerge(common(config), unoptimized());
}

return webpackMerge(common(config), optimized(config));
return webpackMerge(common(config), optimized());
}

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

import Boom from 'boom';
import { RequestHandlerContext } from 'kibana/server';

/** Raw file contents forwarded verbatim to the analysis endpoint. */
export type InputData = any[];

/**
 * User-supplied analysis overrides as they arrive from the client.
 * Every value is a string; `formatOverrides` converts them to typed values.
 */
export interface InputOverrides {
  [key: string]: string;
}

/**
 * Overrides after conversion to their typed form: `column_names` becomes a
 * string array and the two boolean flags become real booleans. Any other
 * keys keep their original string values.
 */
export type FormattedOverrides = InputOverrides & {
  column_names: string[];
  has_header_row: boolean;
  should_trim_fields: boolean;
};

/**
 * Result of a file-structure analysis.
 * `results` mirrors the response body of the ES `ml.fileStructure` call —
 * NOTE(review): field list assumed to match the ES response schema; confirm
 * against the Elasticsearch version in use.
 */
export interface AnalysisResult {
  results: {
    charset: string;
    has_header_row: boolean;
    has_byte_order_marker: boolean;
    format: string;
    field_stats: {
      [fieldName: string]: {
        count: number;
        cardinality: number;
        top_hits: Array<{ count: number; value: any }>;
      };
    };
    sample_start: string;
    num_messages_analyzed: number;
    mappings: {
      [fieldName: string]: {
        type: string;
      };
    };
    quote: string;
    delimiter: string;
    need_client_timezone: boolean;
    num_lines_analyzed: number;
    column_names: string[];
  };
  /** Present only when at least one non-empty override was supplied. */
  overrides?: FormattedOverrides;
}

export function fileDataVisualizerProvider(context: RequestHandlerContext) {
async function analyzeFile(data: any, overrides: any): Promise<AnalysisResult> {
let results = [];

try {
results = await context.ml!.mlClient.callAsCurrentUser('ml.fileStructure', {
body: data,
...overrides,
});
} catch (error) {
const err = error.message !== undefined ? error.message : error;
throw Boom.badRequest(err);
}

const { hasOverrides, reducedOverrides } = formatOverrides(overrides);

return {
...(hasOverrides && { overrides: reducedOverrides }),
results,
};
}

return {
analyzeFile,
};
}

function formatOverrides(overrides: InputOverrides) {
let hasOverrides = false;

const reducedOverrides: FormattedOverrides = Object.keys(overrides).reduce((acc, overrideKey) => {
const overrideValue: string = overrides[overrideKey];
if (overrideValue !== '') {
if (overrideKey === 'column_names') {
acc.column_names = overrideValue.split(',');
} else if (overrideKey === 'has_header_row') {
acc.has_header_row = overrideValue === 'true';
} else if (overrideKey === 'should_trim_fields') {
acc.should_trim_fields = overrideValue === 'true';
} else {
acc[overrideKey] = overrideValue;
}

hasOverrides = true;
}
return acc;
}, {} as FormattedOverrides);

return {
reducedOverrides,
hasOverrides,
};
}
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,43 @@
* you may not use this file except in compliance with the Elastic License.
*/

import { RequestHandlerContext } from 'kibana/server';
import { INDEX_META_DATA_CREATED_BY } from '../../../common/constants/file_datavisualizer';
import { InputData } from './file_data_visualizer';

export function importDataProvider(callWithRequest) {
async function importData(id, index, settings, mappings, ingestPipeline, data) {
/**
 * Options for the bulk index request. Extra string-keyed properties are
 * allowed and passed through to the ES client verbatim.
 */
export interface Settings {
  /** Ingest pipeline id to apply while indexing (set only when one was created). */
  pipeline?: string;
  /** Target index name. */
  index: string;
  /** Bulk request body: alternating action entries and source documents. */
  body: any[];
  [key: string]: any;
}

/** Index mappings payload — shape is defined by the caller / Elasticsearch. */
export interface Mappings {
  [key: string]: any;
}

/** An ingest pipeline to create before importing data. */
export interface InjectPipeline {
  /** Id under which the pipeline is registered. */
  id: string;
  /** Pipeline definition sent as the body of `ingest.putPipeline`. */
  pipeline: any;
}

/** One failed document from a bulk-index response. */
interface Failure {
  // Presumably the document's position within the submitted data —
  // TODO confirm against getFailures.
  item: number;
  /** Error reason reported by Elasticsearch. */
  reason: string;
  /** The original document that failed to index. */
  doc: any;
}

export function importDataProvider(context: RequestHandlerContext) {
const callAsCurrentUser = context.ml!.mlClient.callAsCurrentUser;

async function importData(
id: string,
index: string,
settings: Settings,
mappings: Mappings,
ingestPipeline: InjectPipeline,
data: InputData
) {
let createdIndex;
let createdPipelineId;
const docCount = data.length;
Expand Down Expand Up @@ -35,7 +68,7 @@ export function importDataProvider(callWithRequest) {
createdPipelineId = pipelineId;
}

let failures = [];
let failures: Failure[] = [];
if (data.length) {
const resp = await indexData(index, createdPipelineId, data);
if (resp.success === false) {
Expand Down Expand Up @@ -72,8 +105,8 @@ export function importDataProvider(callWithRequest) {
}
}

async function createIndex(index, settings, mappings) {
const body = {
async function createIndex(index: string, settings: Settings, mappings: Mappings) {
const body: { mappings: Mappings; settings?: Settings } = {
mappings: {
_meta: {
created_by: INDEX_META_DATA_CREATED_BY,
Expand All @@ -86,23 +119,23 @@ export function importDataProvider(callWithRequest) {
body.settings = settings;
}

await callWithRequest('indices.create', { index, body });
await callAsCurrentUser('indices.create', { index, body });
}

async function indexData(index, pipelineId, data) {
async function indexData(index: string, pipelineId: string, data: InputData) {
try {
const body = [];
for (let i = 0; i < data.length; i++) {
body.push({ index: {} });
body.push(data[i]);
}

const settings = { index, body };
const settings: Settings = { index, body };
if (pipelineId !== undefined) {
settings.pipeline = pipelineId;
}

const resp = await callWithRequest('bulk', settings);
const resp = await callAsCurrentUser('bulk', settings);
if (resp.errors) {
throw resp;
} else {
Expand All @@ -113,7 +146,7 @@ export function importDataProvider(callWithRequest) {
};
}
} catch (error) {
let failures = [];
let failures: Failure[] = [];
let ingestError = false;
if (error.errors !== undefined && Array.isArray(error.items)) {
// an expected error where some or all of the bulk request
Expand All @@ -134,11 +167,11 @@ export function importDataProvider(callWithRequest) {
}
}

async function createPipeline(id, pipeline) {
return await callWithRequest('ingest.putPipeline', { id, body: pipeline });
async function createPipeline(id: string, pipeline: any) {
return await callAsCurrentUser('ingest.putPipeline', { id, body: pipeline });
}

function getFailures(items, data) {
function getFailures(items: any[], data: InputData): Failure[] {
const failures = [];
for (let i = 0; i < items.length; i++) {
const item = items[i];
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,11 @@
* you may not use this file except in compliance with the Elastic License.
*/

export { fileDataVisualizerProvider } from './file_data_visualizer';
export { importDataProvider } from './import_data';
// Public surface of the file data visualizer server model.
// NOTE: types are re-exported alongside runtime values (this predates the
// `export type` syntax introduced in TS 3.8).
export {
  fileDataVisualizerProvider,
  InputOverrides,
  InputData,
  AnalysisResult,
} from './file_data_visualizer';

export { importDataProvider, Settings, InjectPipeline, Mappings } from './import_data';
4 changes: 3 additions & 1 deletion x-pack/legacy/plugins/ml/server/routes/apidoc.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
"version": "0.1.0",
"description": "ML Kibana API",
"title": "ML Kibana API",
"url" : "/api/ml/",
"order": [
"DataFrameAnalytics",
"GetDataFrameAnalytics",
Expand Down Expand Up @@ -34,6 +33,9 @@
"ForecastAnomalyDetector",
"GetOverallBuckets",
"GetCategories",
"FileDataVisualizer",
"AnalyzeFile",
"ImportFile"
"ResultsService",
"GetAnomaliesTableData",
"GetCategoryDefinition",
Expand Down
Loading

0 comments on commit 27c3b05

Please sign in to comment.