Merge branch 'master' of github.com:elastic/kibana into controls/dashboardIntegration
ThomThomson committed Oct 24, 2021
2 parents c6732dc + 110a841 commit 43a9442
Showing 31 changed files with 762 additions and 298 deletions.
2 changes: 2 additions & 0 deletions packages/elastic-apm-generator/BUILD.bazel
@@ -25,6 +25,7 @@ NPM_MODULE_EXTRA_FILES = [
]

RUNTIME_DEPS = [
"//packages/elastic-datemath",
"@npm//@elastic/elasticsearch",
"@npm//lodash",
"@npm//moment",
@@ -36,6 +37,7 @@ RUNTIME_DEPS = [
]

TYPES_DEPS = [
"//packages/elastic-datemath",
"@npm//@elastic/elasticsearch",
"@npm//moment",
"@npm//p-limit",
28 changes: 21 additions & 7 deletions packages/elastic-apm-generator/README.md
@@ -11,7 +11,7 @@ This section assumes that you've installed Kibana's dependencies by running `yar
This library can currently be used in two ways:

- Imported as a Node.js module, for instance to be used in Kibana's functional test suite.
- With a command line interface, to index data based on some example scenarios.
- With a command line interface, to index data based on a specified scenario.

### Using the Node.js module

Expand All @@ -32,7 +32,7 @@ const instance = service('synth-go', 'production', 'go')
.instance('instance-a');

const from = new Date('2021-01-01T12:00:00.000Z').getTime();
const to = new Date('2021-01-01T12:00:00.000Z').getTime() - 1;
const to = new Date('2021-01-01T12:00:00.000Z').getTime();

const traceEvents = timerange(from, to)
.interval('1m')
@@ -82,12 +82,26 @@ const esEvents = toElasticsearchOutput([

### CLI

Via the CLI, you can upload examples. The supported examples are listed in `src/lib/es.ts`. A `--target` option that specifies the Elasticsearch URL should be defined when running the `example` command. Here's an example:
Via the CLI, you can upload scenarios, either using a fixed time range or continuously generating data. Some examples are available in `src/scripts/examples`. Here's an example for live data:

`$ node packages/elastic-apm-generator/src/scripts/es.js example simple-trace --target=http://admin:changeme@localhost:9200`
`$ node packages/elastic-apm-generator/src/scripts/run packages/elastic-apm-generator/src/examples/01_simple_trace.ts --target=http://admin:changeme@localhost:9200 --live`

For a fixed time window:
`$ node packages/elastic-apm-generator/src/scripts/run packages/elastic-apm-generator/src/examples/01_simple_trace.ts --target=http://admin:changeme@localhost:9200 --from=now-24h --to=now`

The script will try to automatically find bootstrapped APM indices. __If these indices do not exist, the script will exit with an error. It will not bootstrap the indices itself.__

The following options are supported:
- `to`: the end of the time range, in ISO format. By default, the current time will be used.
- `from`: the start of the time range, in ISO format. By default, `to` minus 15 minutes will be used.
- `apm-server-version`: the version used in the index names bootstrapped by APM Server, e.g. `7.16.0`. __If these indices do not exist, the script will exit with an error. It will not bootstrap the indices itself.__
| Option | Description | Default |
| -------------- | ------------------------------------------------------- | ------------ |
| `--from` | The start of the time window. | `now - 15m` |
| `--to` | The end of the time window. | `now` |
| `--live`       | Continuously ingest data.                                | `false`      |
| `--bucketSize` | Size of bucket for which to generate data. | `15m` |
| `--clean` | Clean APM indices before indexing new data. | `false` |
| `--interval` | The interval at which to index data. | `10s` |
| `--logLevel` | Log level. | `info` |
| `--lookback` | The lookback window for which data should be generated. | `15m` |
| `--target` | Elasticsearch target, including username/password. | **Required** |
| `--workers`    | Number of simultaneously connected ES clients.           | `1`          |
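
For instance, to re-index a fixed 24-hour window with a clean slate and two workers, the options above can be combined (same example scenario and target as before; the flag values are only illustrative):

`$ node packages/elastic-apm-generator/src/scripts/run packages/elastic-apm-generator/src/examples/01_simple_trace.ts --target=http://admin:changeme@localhost:9200 --from=now-24h --to=now --clean --workers=2`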

13 changes: 13 additions & 0 deletions packages/elastic-apm-generator/src/.eslintrc.js
@@ -0,0 +1,13 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

module.exports = {
rules: {
'import/no-default-export': 'off',
},
};
2 changes: 1 addition & 1 deletion packages/elastic-apm-generator/src/lib/interval.ts
@@ -21,7 +21,7 @@ export class Interval {
throw new Error('Failed to parse interval');
}
const timestamps: number[] = [];
while (now <= this.to) {
while (now < this.to) {
timestamps.push(...new Array<number>(rate).fill(now));
now = moment(now)
.add(Number(args[1]), args[2] as any)
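
To make the effect of this change concrete, here is a minimal sketch (not the `Interval` class itself) that mirrors the loop above; the 1-minute step and rate of 1 are chosen only for illustration:

```ts
import moment from 'moment';

const from = new Date('2021-01-01T12:00:00.000Z').getTime();
const to = new Date('2021-01-01T12:05:00.000Z').getTime();
const rate = 1;

const timestamps: number[] = [];
let now = from;
while (now < to) {
  // `rate` copies of each timestamp, exactly as the loop above does.
  timestamps.push(...new Array<number>(rate).fill(now));
  now = moment(now).add(1, 'm').valueOf();
}

// With `<` the end of the range is exclusive: this yields 12:00 through 12:04,
// five timestamps; the previous `<=` would also have included 12:05.
```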
@@ -10,7 +10,25 @@ import { set } from 'lodash';
import { getObserverDefaults } from '../..';
import { Fields } from '../entity';

export function toElasticsearchOutput(events: Fields[], versionOverride?: string) {
export interface ElasticsearchOutput {
_index: string;
_source: unknown;
}

export interface ElasticsearchOutputWriteTargets {
transaction: string;
span: string;
error: string;
metric: string;
}

export function toElasticsearchOutput({
events,
writeTargets,
}: {
events: Fields[];
writeTargets: ElasticsearchOutputWriteTargets;
}): ElasticsearchOutput[] {
return events.map((event) => {
const values = {
...event,
@@ -29,7 +47,7 @@ set(document, key, val);
set(document, key, val);
}
return {
_index: `apm-${versionOverride || values['observer.version']}-${values['processor.event']}`,
_index: writeTargets[event['processor.event'] as keyof ElasticsearchOutputWriteTargets],
_source: document,
};
});
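
A rough sketch of a call site for the new object-style signature follows; the import path and the concrete index names are assumptions, not taken from this diff (the run script resolves the real write targets from the bootstrapped APM indices):

```ts
import {
  toElasticsearchOutput,
  ElasticsearchOutputWriteTargets,
} from './lib/output/to_elasticsearch_output'; // import path is a guess for illustration

// Placeholder index names; real ones come from the APM Server installation.
const writeTargets: ElasticsearchOutputWriteTargets = {
  transaction: 'apm-7.16.0-transaction',
  span: 'apm-7.16.0-span',
  error: 'apm-7.16.0-error',
  metric: 'apm-7.16.0-metric',
};

// Events as produced by a scenario, with flattened field names.
const events = [{ 'processor.event': 'transaction', 'transaction.duration.us': 1000000 }] as any[];

const docs = toElasticsearchOutput({ events, writeTargets });
// Each document's _index is now looked up in writeTargets via the event's
// `processor.event`, instead of being derived from `observer.version`.
```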
113 changes: 0 additions & 113 deletions packages/elastic-apm-generator/src/scripts/es.ts

This file was deleted.

@@ -9,12 +9,12 @@
import { service, timerange, getTransactionMetrics, getSpanDestinationMetrics } from '../..';
import { getBreakdownMetrics } from '../../lib/utils/get_breakdown_metrics';

export function simpleTrace(from: number, to: number) {
export default function ({ from, to }: { from: number; to: number }) {
const instance = service('opbeans-go', 'production', 'go').instance('instance');

const range = timerange(from, to);

const transactionName = '240rpm/60% 1000ms';
const transactionName = '240rpm/75% 1000ms';

const successfulTraceEvents = range
.interval('1s')
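
For reference, a hypothetical skeleton for a new scenario file following this shape (the service name and agent are made up, and the event-building calls are elided; see `01_simple_trace.ts` for the full version):

```ts
import { service, timerange } from '../..';

export default function ({ from, to }: { from: number; to: number }) {
  // A synthetic service instance, analogous to opbeans-go above.
  const instance = service('synth-node', 'production', 'nodejs').instance('instance-a');

  // The run script passes `from` and `to` into the default export.
  const range = timerange(from, to);
  const interval = range.interval('1s');

  // ...use `instance` and `interval` to build the transaction, span and error
  // events for this scenario, as 01_simple_trace.ts does.
}
```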
@@ -12,4 +12,4 @@ require('@babel/register')({
presets: [['@babel/preset-env', { targets: { node: 'current' } }], '@babel/preset-typescript'],
});

require('./es.ts');
require('./run.ts');
117 changes: 117 additions & 0 deletions packages/elastic-apm-generator/src/scripts/run.ts
@@ -0,0 +1,117 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
import datemath from '@elastic/datemath';
import yargs from 'yargs/yargs';
import { cleanWriteTargets } from './utils/clean_write_targets';
import {
bucketSizeOption,
cleanOption,
fileOption,
intervalOption,
targetOption,
workerOption,
logLevelOption,
} from './utils/common_options';
import { intervalToMs } from './utils/interval_to_ms';
import { getCommonResources } from './utils/get_common_resources';
import { startHistoricalDataUpload } from './utils/start_historical_data_upload';
import { startLiveDataUpload } from './utils/start_live_data_upload';

yargs(process.argv.slice(2))
.command(
'*',
'Generate data and index into Elasticsearch',
(y) => {
return y
.positional('file', fileOption)
.option('bucketSize', bucketSizeOption)
.option('workers', workerOption)
.option('interval', intervalOption)
.option('clean', cleanOption)
.option('target', targetOption)
.option('logLevel', logLevelOption)
.option('from', {
description: 'The start of the time window',
})
.option('to', {
description: 'The end of the time window',
})
.option('live', {
description: 'Generate and index data continuously',
boolean: true,
})
.conflicts('to', 'live');
},
async (argv) => {
const {
scenario,
intervalInMs,
bucketSizeInMs,
target,
workers,
clean,
logger,
writeTargets,
client,
} = await getCommonResources(argv);

if (clean) {
await cleanWriteTargets({ writeTargets, client, logger });
}

const to = datemath.parse(String(argv.to ?? 'now'))!.valueOf();
const from = argv.from
? datemath.parse(String(argv.from))!.valueOf()
: to - intervalToMs('15m');

const live = argv.live;

logger.info(
`Starting data generation\n: ${JSON.stringify(
{
intervalInMs,
bucketSizeInMs,
workers,
target,
writeTargets,
from: new Date(from).toISOString(),
to: new Date(to).toISOString(),
live,
},
null,
2
)}`
);

startHistoricalDataUpload({
from,
to,
scenario,
intervalInMs,
bucketSizeInMs,
client,
workers,
writeTargets,
logger,
});

if (live) {
startLiveDataUpload({
bucketSizeInMs,
client,
intervalInMs,
logger,
scenario,
start: to,
workers,
writeTargets,
});
}
}
)
.parse();