[build] Support for generating docker image (#28380)
mistic authored and tylersmalley committed Feb 5, 2019
1 parent 13a213c commit 3a1d4ad
Showing 11 changed files with 520 additions and 3 deletions.
5 changes: 5 additions & 0 deletions src/dev/build/build_distributables.js
@@ -34,6 +34,7 @@ import {
CreateArchivesSourcesTask,
CreateArchivesTask,
CreateDebPackageTask,
CreateDockerPackageTask,
CreateEmptyDirsAndFilesTask,
CreateNoticeFileTask,
CreatePackageJsonTask,
@@ -64,6 +65,7 @@ export async function buildDistributables(options) {
createArchives,
createRpmPackage,
createDebPackage,
createDockerPackage,
versionQualifier,
targetAllPlatforms,
} = options;
@@ -145,6 +147,9 @@ export async function buildDistributables(options) {
if (createRpmPackage) { // control w/ --rpm or --skip-os-packages
await run(CreateRpmPackageTask);
}
if (createDockerPackage) { // control w/ --docker or --skip-os-packages
await run(CreateDockerPackageTask);
}

/**
* finalize artifacts by writing sha1sums of each into the target directory
13 changes: 11 additions & 2 deletions src/dev/build/cli.js
@@ -39,6 +39,7 @@ const flags = getopts(process.argv.slice(0), {
'skip-os-packages',
'rpm',
'deb',
'docker',
'release',
'skip-node-download',
'verbose',
@@ -75,10 +76,11 @@ if (flags.help) {
--oss {dim Only produce the OSS distributable of Kibana}
--no-oss {dim Only produce the default distributable of Kibana}
--skip-archives {dim Don't produce tar/zip archives}
--skip-os-packages {dim Don't produce rpm/deb packages}
--skip-os-packages {dim Don't produce rpm/deb/docker packages}
--all-platforms {dim Produce archives for all platforms, not just this one}
--rpm {dim Only build the rpm package}
--deb {dim Only build the deb package}
--docker {dim Only build the docker image}
--release {dim Produce a release-ready distributable}
--version-qualifier {dim Suffix version with a qualifier}
--skip-node-download {dim Reuse existing downloads of node.js}
@@ -89,6 +91,12 @@ if (flags.help) {
process.exit(1);
}

// In order to build a docker image we always need
// to generate the artifacts for all platforms
if (flags.docker) {
flags['all-platforms'] = true;
}

const log = new ToolingLog({
level: pickLevelFromFlags(flags, {
default: flags.debug === false ? 'info' : 'debug'
@@ -102,7 +110,7 @@ function isOsPackageDesired(name) {
}

// build all if no flags specified
if (flags.rpm === undefined && flags.deb === undefined) {
if (flags.rpm === undefined && flags.deb === undefined && flags.docker === undefined) {
return true;
}

@@ -119,6 +127,7 @@ buildDistributables({
createArchives: !Boolean(flags['skip-archives']),
createRpmPackage: isOsPackageDesired('rpm'),
createDebPackage: isOsPackageDesired('deb'),
createDockerPackage: isOsPackageDesired('docker'),
targetAllPlatforms: Boolean(flags['all-platforms']),
}).catch(error => {
if (!isErrorLogged(error)) {
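For reference, a hedged sketch (not part of this commit) of how the new flag resolves once cli.js has processed it; the option names come from the hunks above, everything else is illustrative:

// Illustrative only: the docker-related subset of options cli.js passes to
// buildDistributables() when invoked with --docker, following the logic above.
import { buildDistributables } from './build_distributables';

buildDistributables({
  // ...remaining options omitted, they are unchanged by this commit...
  createArchives: true,          // !Boolean(flags['skip-archives'])
  createRpmPackage: false,       // isOsPackageDesired('rpm') when only --docker is passed
  createDebPackage: false,       // isOsPackageDesired('deb') when only --docker is passed
  createDockerPackage: true,     // isOsPackageDesired('docker')
  targetAllPlatforms: true,      // forced to true whenever flags.docker is set
});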
9 changes: 9 additions & 0 deletions src/dev/build/tasks/os_packages/create_os_package_tasks.js
@@ -18,6 +18,7 @@
*/

import { runFpm } from './run_fpm';
import { runDockerGenerator } from './docker_generator';

export const CreateDebPackageTask = {
description: 'Creating deb package',
@@ -40,3 +41,11 @@ export const CreateRpmPackageTask = {
]);
}
};

export const CreateDockerPackageTask = {
description: 'Creating docker package',

async run(config, log, build) {
await runDockerGenerator(config, log, build);
}
};
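For illustration only, a sketch of the surface the new task relies on, shaped after the calls made by runDockerGenerator later in this diff; the stub values below are hypothetical:

// Hypothetical stubs showing what CreateDockerPackageTask.run(config, log, build) touches:
// config.getBuildVersion(), config.resolveFromTarget(), config.resolveFromRepo(), build.isOss().
import { CreateDockerPackageTask } from './create_os_package_tasks';

const config = {
  getBuildVersion: () => '7.0.0',                                  // hypothetical version
  resolveFromTarget: (...parts) => ['target', ...parts].join('/'),
  resolveFromRepo: (...parts) => ['.', ...parts].join('/'),
};
const build = { isOss: () => true };
const log = { info: console.log, debug: console.log };

// await CreateDockerPackageTask.run(config, log, build);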
20 changes: 20 additions & 0 deletions src/dev/build/tasks/os_packages/docker_generator/index.js
@@ -0,0 +1,20 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

export * from './run';
@@ -0,0 +1,175 @@
#!/bin/bash

# Run Kibana, using environment variables to set longopts defining Kibana's
# configuration.
#
# eg. Setting the environment variable:
#
# ELASTICSEARCH_STARTUPTIMEOUT=60
#
# will cause Kibana to be invoked with:
#
# --elasticsearch.startupTimeout=60

kibana_vars=(
console.enabled
console.proxyConfig
console.proxyFilter
elasticsearch.customHeaders
elasticsearch.hosts
elasticsearch.logQueries
elasticsearch.password
elasticsearch.pingTimeout
elasticsearch.preserveHost
elasticsearch.requestHeadersWhitelist
elasticsearch.requestTimeout
elasticsearch.shardTimeout
elasticsearch.sniffInterval
elasticsearch.sniffOnConnectionFault
elasticsearch.sniffOnStart
elasticsearch.ssl.certificate
elasticsearch.ssl.certificateAuthorities
elasticsearch.ssl.key
elasticsearch.ssl.keyPassphrase
elasticsearch.ssl.verificationMode
elasticsearch.startupTimeout
elasticsearch.username
i18n.locale
kibana.defaultAppId
kibana.index
logging.dest
logging.quiet
logging.silent
logging.useUTC
logging.verbose
map.includeElasticMapsService
ops.interval
path.data
pid.file
regionmap
regionmap.includeElasticMapsService
server.basePath
server.customResponseHeaders
server.defaultRoute
server.host
server.maxPayloadBytes
server.name
server.port
server.rewriteBasePath
server.ssl.cert
server.ssl.certificate
server.ssl.certificateAuthorities
server.ssl.cipherSuites
server.ssl.clientAuthentication
server.customResponseHeaders
server.ssl.enabled
server.ssl.key
server.ssl.keyPassphrase
server.ssl.redirectHttpFromPort
server.ssl.supportedProtocols
server.xsrf.whitelist
status.allowAnonymous
status.v6ApiFormat
tilemap.options.attribution
tilemap.options.maxZoom
tilemap.options.minZoom
tilemap.options.subdomains
tilemap.url
timelion.enabled
vega.enableExternalUrls
xpack.apm.enabled
xpack.apm.ui.enabled
xpack.canvas.enabled
xpack.graph.enabled
xpack.grokdebugger.enabled
xpack.infra.enabled
xpack.infra.query.partitionFactor
xpack.infra.query.partitionSize
xpack.infra.sources.default.fields.container
xpack.infra.sources.default.fields.host
xpack.infra.sources.default.fields.message
xpack.infra.sources.default.fields.pod
xpack.infra.sources.default.fields.tiebreaker
xpack.infra.sources.default.fields.timestamp
xpack.infra.sources.default.logAlias
xpack.infra.sources.default.metricAlias
xpack.ml.enabled
xpack.monitoring.elasticsearch.password
xpack.monitoring.elasticsearch.pingTimeout
xpack.monitoring.elasticsearch.hosts
xpack.monitoring.elasticsearch.username
xpack.monitoring.elasticsearch.ssl.certificateAuthorities
xpack.monitoring.elasticsearch.ssl.verificationMode
xpack.monitoring.enabled
xpack.monitoring.kibana.collection.enabled
xpack.monitoring.kibana.collection.interval
xpack.monitoring.max_bucket_size
xpack.monitoring.min_interval_seconds
xpack.monitoring.node_resolver
xpack.monitoring.report_stats
xpack.monitoring.elasticsearch.pingTimeout
xpack.monitoring.ui.container.elasticsearch.enabled
xpack.monitoring.ui.container.logstash.enabled
xpack.monitoring.ui.enabled
xpack.reporting.capture.browser.chromium.disableSandbox
xpack.reporting.capture.browser.chromium.proxy.enabled
xpack.reporting.capture.browser.chromium.proxy.server
xpack.reporting.capture.browser.chromium.proxy.bypass
xpack.reporting.capture.browser.type
xpack.reporting.capture.concurrency
xpack.reporting.capture.loadDelay
xpack.reporting.capture.settleTime
xpack.reporting.capture.timeout
xpack.reporting.csv.maxSizeBytes
xpack.reporting.enabled
xpack.reporting.encryptionKey
xpack.reporting.index
xpack.reporting.kibanaApp
xpack.reporting.kibanaServer.hostname
xpack.reporting.kibanaServer.port
xpack.reporting.kibanaServer.protocol
xpack.reporting.queue.indexInterval
xpack.reporting.queue.pollInterval
xpack.reporting.queue.timeout
xpack.reporting.roles.allow
xpack.searchprofiler.enabled
xpack.security.authProviders
xpack.security.cookieName
xpack.security.enabled
xpack.security.encryptionKey
xpack.security.secureCookies
xpack.security.sessionTimeout
xpack.xpack_main.telemetry.enabled
)

longopts=''
for kibana_var in ${kibana_vars[*]}; do
# 'elasticsearch.hosts' -> 'ELASTICSEARCH_HOSTS'
env_var=$(echo ${kibana_var^^} | tr . _)

# Indirectly look up env var values via the name of the var.
# REF: http://tldp.org/LDP/abs/html/bashver2.html#EX78
value=${!env_var}
if [[ -n $value ]]; then
longopt="--${kibana_var}=${value}"
longopts+=" ${longopt}"
fi
done

# Files created at run-time should be group-writable, for Openshift's sake.
umask 0002

# The virtual file /proc/self/cgroup should list the current cgroup
# membership. For each hierarchy, you can follow the cgroup path from
# this file to the cgroup filesystem (usually /sys/fs/cgroup/) and
# introspect the statistics for the cgroup for the given
# hierarchy. Alas, Docker breaks this by mounting the container
# statistics at the root while leaving the cgroup paths as the actual
# paths. Therefore, Kibana provides a mechanism to override
# reading the cgroup path from /proc/self/cgroup and instead uses the
# cgroup path defined in the configuration properties
# cpu.cgroup.path.override and cpuacct.cgroup.path.override.
# Therefore, we set this value here so that cgroup statistics are
# available for the container this process will run in.

exec /usr/share/kibana/bin/kibana --cpu.cgroup.path.override=/ --cpuacct.cgroup.path.override=/ ${longopts} "$@"
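The loop above can be read as follows; here is an illustrative JavaScript equivalent (not part of this commit) of the env-var to longopt translation, using an abbreviated whitelist:

// Illustrative JavaScript equivalent of the bash loop above (not part of this commit):
// each whitelisted setting is uppercased, dots become underscores, and any matching
// environment variable becomes a --setting.name=value argument for Kibana.
const kibanaVars = ['elasticsearch.hosts', 'server.basePath', 'server.name']; // abbreviated
const longopts = kibanaVars
  .filter(name => process.env[name.toUpperCase().replace(/\./g, '_')] !== undefined)
  .map(name => `--${name}=${process.env[name.toUpperCase().replace(/\./g, '_')]}`);
console.log(longopts.join(' ')); // e.g. SERVER_NAME=kibana -> --server.name=kibana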
99 changes: 99 additions & 0 deletions src/dev/build/tasks/os_packages/docker_generator/run.js
@@ -0,0 +1,99 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

import { access, link, unlink, chmod } from 'fs';
import { resolve } from 'path';
import { promisify } from 'util';
import { write, copyAll, mkdirp, exec } from '../../../lib';
import * as dockerTemplates from './templates';

const accessAsync = promisify(access);
const linkAsync = promisify(link);
const unlinkAsync = promisify(unlink);
const chmodAsync = promisify(chmod);

export async function runDockerGenerator(config, log, build) {
const license = build.isOss() ? 'ASL 2.0' : 'Elastic License';
const imageFlavor = build.isOss() ? '-oss' : '';
const imageTag = 'docker.elastic.co/kibana/kibana';
const versionTag = config.getBuildVersion();
const artifactTarball = `kibana${ imageFlavor }-${ versionTag }-linux-x86_64.tar.gz`;
const artifactsDir = config.resolveFromTarget('.');
const dockerBuildDir = config.resolveFromRepo('build', 'kibana-docker', build.isOss() ? 'oss' : 'default');
const dockerOutputDir = config.resolveFromTarget(`kibana${ imageFlavor }-${ versionTag }-docker.tar.gz`);

// Verify that the kibana linux target we need in order
// to build the kibana docker image is present.
// Also create the docker build target folder
// and delete any previously linked target inside the
// kibana docker build folder.
try {
await accessAsync(resolve(artifactsDir, artifactTarball));
await mkdirp(dockerBuildDir);
await unlinkAsync(resolve(dockerBuildDir, artifactTarball));
} catch (e) {
if (e && e.code === 'ENOENT' && e.syscall === 'access') {
throw new Error(
`Kibana linux target (${ artifactTarball }) is needed in order to build ${''
}the docker image. None was found at ${ artifactsDir }`
);
}
}

// Link the kibana linux target into the
// kibana docker build folder
await linkAsync(
resolve(artifactsDir, artifactTarball),
resolve(dockerBuildDir, artifactTarball),
);

// Write all the needed docker config files
// into the kibana-docker folder
const scope = {
artifactTarball,
imageFlavor,
versionTag,
license,
artifactsDir,
imageTag,
dockerOutputDir
};

for (const [, dockerTemplate] of Object.entries(dockerTemplates)) {
await write(resolve(dockerBuildDir, dockerTemplate.name), dockerTemplate.generator(scope));
}

// Copy all the needed resources into the kibana-docker folder
// in order to build the docker image according to the dockerfile defined
// under templates/kibana_yml.template.js
await copyAll(
config.resolveFromRepo('src/dev/build/tasks/os_packages/docker_generator/resources'),
dockerBuildDir,
);

// Build the docker image into the target folder
// by running the build_docker.sh script generated
// from templates/build_docker_sh.template.js
await chmodAsync(`${resolve(dockerBuildDir, 'build_docker.sh')}`, '755');
await exec(log, `./build_docker.sh`, [], {
cwd: dockerBuildDir,
level: 'info',
});
}
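The Object.entries(dockerTemplates) loop above assumes each module exported from './templates' provides a name and a generator(scope) function; a hypothetical sketch of that shape (the real templates belong to this commit but are not shown on this page):

// Hypothetical illustration of the template shape consumed by runDockerGenerator above:
// `name` is the file written into the docker build folder and `generator(scope)` returns
// its contents. The body below is illustrative only, not the real template.
export const exampleDockerfileTemplate = {
  name: 'Dockerfile',
  generator: ({ artifactTarball, versionTag, imageFlavor, license }) =>
    `# Kibana ${versionTag}${imageFlavor} image (license: ${license})\n` +
    `# built from ${artifactTarball} -- illustrative only\n`,
};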