
[Reporting] code cleanup for reporting browser build/install/setup utilities (elastic#98799)

* [Reporting] code cleanup for reporting browser setup utilities

* fix target_cpu

* Update README.md

* Update README.md

* add note about target_cpu

* Update paths.ts

* more cleanup

* Update src/dev/chromium_version.ts

Co-authored-by: Michael Dokolin <[email protected]>

* remove bug

Co-authored-by: Michael Dokolin <[email protected]>
Co-authored-by: Kibana Machine <[email protected]>
# Conflicts:
#	src/dev/code_coverage/ingest_coverage/__tests__/enumerate_patterns.test.js
#	src/dev/code_coverage/ingest_coverage/__tests__/mocks/team_assign_mock.txt
#	src/dev/code_coverage/ingest_coverage/__tests__/transforms.test.js
tsullivan committed Apr 30, 2021
1 parent 91f4bc8 commit 9c8190a
Showing 30 changed files with 623 additions and 224 deletions.
10 changes: 6 additions & 4 deletions src/dev/chromium_version.ts
@@ -35,8 +35,10 @@ async function getPuppeteerRelease(log: ToolingLog): Promise<PuppeteerRelease> {
'Could not get the Puppeteer version! Check node_modules/puppeteer/package.json'
);
}
log.info(`Kibana is using Puppeteer ${version} (${forkCompatibilityMap[version]})`);
return forkCompatibilityMap[version];
const puppeteerRelease = forkCompatibilityMap[version] ?? version;

log.info(`Kibana is using Puppeteer ${version} (${puppeteerRelease})`);
return puppeteerRelease;
}

async function getChromiumRevision(
@@ -129,8 +131,8 @@ run(
description: chalk`
Display the Chromium git commit that correlates to a given Puppeteer release.
- node x-pack/dev-tools/chromium_version 5.5.0 {dim # gets the Chromium commit for Puppeteer v5.5.0}
- node x-pack/dev-tools/chromium_version {dim # gets the Chromium commit for the Kibana dependency version of Puppeteer}
- node scripts/chromium_version 5.5.0 {dim # gets the Chromium commit for Puppeteer v5.5.0}
- node scripts/chromium_version {dim # gets the Chromium commit for the Kibana dependency version of Puppeteer}
You can use https://omahaproxy.appspot.com/ to look up the Chromium release that first shipped with that commit.
`,
48 changes: 48 additions & 0 deletions src/dev/code_coverage/ingest_coverage/__tests__/enumerate_patterns.test.js
@@ -0,0 +1,48 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

import { enumeratePatterns } from '../team_assignment/enumerate_patterns';
import { ToolingLog, REPO_ROOT } from '@kbn/dev-utils';

const log = new ToolingLog({
level: 'info',
writeTo: process.stdout,
});

describe(`enumeratePatterns`, () => {
it(`should resolve x-pack/plugins/reporting/server/browsers/extract/unzip.ts to kibana-reporting`, () => {
const actual = enumeratePatterns(REPO_ROOT)(log)(
new Map([['x-pack/plugins/reporting', ['kibana-reporting']]])
);

expect(
actual[0].includes(
'x-pack/plugins/reporting/server/browsers/extract/unzip.ts kibana-reporting'
)
).toBe(true);
});
it(`should resolve src/plugins/charts/public/static/color_maps/color_maps.ts to kibana-app`, () => {
const actual = enumeratePatterns(REPO_ROOT)(log)(
new Map([['src/plugins/charts/public/static/color_maps', ['kibana-app']]])
);

expect(actual[0][0]).toBe(
'src/plugins/charts/public/static/color_maps/color_maps.ts kibana-app'
);
});
it(`should resolve x-pack/plugins/security_solution/public/common/components/exceptions/edit_exception_modal/translations.ts to kibana-security`, () => {
const short = 'x-pack/plugins/security_solution';
const actual = enumeratePatterns(REPO_ROOT)(log)(new Map([[short, ['kibana-security']]]));

expect(
actual[0].includes(
`${short}/public/common/components/exceptions/edit_exception_modal/translations.ts kibana-security`
)
).toBe(true);
});
});

src/dev/code_coverage/ingest_coverage/__tests__/mocks/team_assign_mock.txt (large diffs are not rendered by default).

137 changes: 137 additions & 0 deletions src/dev/code_coverage/ingest_coverage/__tests__/transforms.test.js
@@ -0,0 +1,137 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/

import {
ciRunUrl,
coveredFilePath,
itemizeVcs,
prokPrevious,
teamAssignment,
last,
} from '../transforms';
import { ToolingLog } from '@kbn/dev-utils';

describe(`Transform fns`, () => {
describe(`ciRunUrl`, () => {
it(`should add the url when present in the environment`, () => {
process.env.CI_RUN_URL = 'blah';
expect(ciRunUrl()).toHaveProperty('ciRunUrl', 'blah');
});
it(`should not include the url if not present in the environment`, () => {
process.env.CI_RUN_URL = void 0;
expect(ciRunUrl({ a: 'a' })).not.toHaveProperty('ciRunUrl');
});
});
describe(`coveredFilePath`, () => {
describe(`in the code-coverage job`, () => {
it(`should remove the jenkins workspace path`, () => {
const obj = {
staticSiteUrl:
'/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana/x-pack/plugins/reporting/server/browsers/extract/unzip.ts',
COVERAGE_INGESTION_KIBANA_ROOT:
'/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana',
};
expect(coveredFilePath(obj)).toHaveProperty(
'coveredFilePath',
'x-pack/plugins/reporting/server/browsers/extract/unzip.ts'
);
});
});
describe(`in the qa research job`, () => {
it(`should remove the jenkins workspace path`, () => {
const obj = {
staticSiteUrl:
'/var/lib/jenkins/workspace/elastic+kibana+qa-research/kibana/x-pack/plugins/reporting/server/browsers/extract/unzip.ts',
COVERAGE_INGESTION_KIBANA_ROOT:
'/var/lib/jenkins/workspace/elastic+kibana+qa-research/kibana',
};
expect(coveredFilePath(obj)).toHaveProperty(
'coveredFilePath',
'x-pack/plugins/reporting/server/browsers/extract/unzip.ts'
);
});
});
});
describe(`prokPrevious`, () => {
const comparePrefixF = () => 'https://github.com/elastic/kibana/compare';
process.env.FETCHED_PREVIOUS = 'A';
it(`should return a previous compare url`, () => {
const actual = prokPrevious(comparePrefixF)('B');
expect(actual).toBe(`https://github.com/elastic/kibana/compare/A...B`);
});
});
describe(`itemizeVcs`, () => {
it(`should return a sha url`, () => {
const vcsInfo = [
'origin/ingest-code-coverage',
'f07b34f6206',
`Tre' Seymour`,
`Lorem :) ipsum Tre' λ dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.`,
];
expect(itemizeVcs(vcsInfo)({}).vcs).toHaveProperty(
'vcsUrl',
`https://github.com/elastic/kibana/commit/${vcsInfo[1]}`
);
});
});
describe(`teamAssignment`, () => {
const teamAssignmentsPathMOCK =
'src/dev/code_coverage/ingest_coverage/__tests__/mocks/team_assign_mock.txt';
const coveredFilePath = 'x-pack/plugins/reporting/server/browsers/extract/unzip.ts';
const obj = { coveredFilePath };
const log = new ToolingLog({
level: 'info',
writeTo: process.stdout,
});

describe(`with a coveredFilePath of ${coveredFilePath}`, () => {
const expected = 'kibana-reporting';
it(`should resolve to ${expected}`, async () => {
const actual = await teamAssignment(teamAssignmentsPathMOCK)(log)(obj);
const { team } = actual;
expect(team).toEqual(expected);
});
});

describe(`with a coveredFilePath of src/plugins/charts/public/static/color_maps/color_maps.ts`, () => {
const expected = 'kibana-reporting';
it(`should resolve to ${expected}`, async () => {
const actual = await teamAssignment(teamAssignmentsPathMOCK)(log)(obj);
const { team } = actual;
expect(team).toEqual(expected);
});
});

describe(`last fn`, () => {
describe(`applied to n results`, () => {
it(`should pick the last one`, () => {
const nteams = `src/plugins/charts/public/static/color_maps/color_maps.ts kibana-app
src/plugins/charts/public/static/color_maps/color_maps.ts kibana-app-arch`;

const actual = last(nteams);

expect(actual).toBe(
'src/plugins/charts/public/static/color_maps/color_maps.ts kibana-app-arch'
);
});
});
describe(`applied to 1 result`, () => {
it(`should pick that 1 result`, () => {
const nteams =
'src/plugins/charts/public/static/color_maps/color_maps.ts kibana-app-arch';

const actual = last(nteams);

expect(actual).toBe(
'src/plugins/charts/public/static/color_maps/color_maps.ts kibana-app-arch'
);
});
});
});
});
});
2 changes: 1 addition & 1 deletion x-pack/.gitignore
@@ -3,7 +3,7 @@
/target
/test/functional/failure_debug
/test/functional/screenshots
/test/functional/apps/reporting/reports/session
/test/functional/apps/**/reports/session
/test/reporting/configs/failure_debug/
/plugins/reporting/.chromium/
/plugins/reporting/chromium/
26 changes: 17 additions & 9 deletions x-pack/build_chromium/README.md
@@ -12,8 +12,8 @@ which is where we have two machines provisioned for the Linux and Windows
builds. Mac builds can be achieved locally, and are a great place to start to
gain familiarity.

**NOTE:** Linux builds should be done in Ubuntu on x86 architecture. ARM builds
are created in x86. CentOS is not supported for building Chromium.
**NOTE:** Linux builds should be done in Ubuntu on x64 architecture. ARM builds
are created in x64 using cross-compiling. CentOS is not supported for building Chromium.

1. Login to our GCP instance [here using your okta credentials](https://console.cloud.google.com/).
2. Click the "Compute Engine" tab.
@@ -27,25 +27,32 @@ are created in x86. CentOS is not supported for building Chromium.
- python2 (`python` must link to `python2`)
- lsb_release
- tmux is recommended in case your ssh session is interrupted
6. Copy the entire `build_chromium` directory into a GCP storage bucket, so you can copy the scripts into the instance and run them.
- "Cloud API access scopes": must have **read / write** scope for the Storage API
6. Copy the entire `build_chromium` directory from the `headless_shell_staging` bucket. To do this, use `gsutil rsync`:
```sh
# This shows a preview of what would change by synchronizing the source scripts with the destination GCS bucket.
# Remove the `-n` flag to enact the changes
gsutil -m rsync -n -r x-pack/build_chromium gs://headless_shell_staging/build_chromium
```

## Build Script Usage

```
These commands show how to set up an environment to build:
```sh
# Allow our scripts to use depot_tools commands
export PATH=$HOME/chromium/depot_tools:$PATH

# Create a dedicated working directory for this directory of Python scripts.
mkdir ~/chromium && cd ~/chromium

# Copy the scripts from the Kibana repo to use them conveniently in the working directory
gsutil cp -r gs://my-bucket/build_chromium .
# Copy the scripts from the Kibana team's GCS bucket
gsutil cp -r gs://headless_shell_staging/build_chromium .

# Install the OS packages, configure the environment, download the chromium source (25GB)
python ./build_chromium/init.sh [arch_name]
python ./build_chromium/init.py [arch_name]

# Run the build script with the path to the chromium src directory, the git commit hash
python ./build_chromium/build.py <commit_id> x86
python ./build_chromium/build.py <commit_id> x64

# OR You can build for ARM
python ./build_chromium/build.py <commit_id> arm64
@@ -107,7 +114,7 @@ use the Kibana `build.py` script (in this directory).

It's recommended that you create a working directory for the chromium source
code and all the build tools, and run the commands from there:
```
```sh
mkdir ~/chromium && cd ~/chromium
cp -r ~/path/to/kibana/x-pack/build_chromium .
python ./build_chromium/init.sh [arch_name]
@@ -216,6 +223,7 @@ In the case of Windows, you can use IE to open `http://localhost:9221` and see i

The following links provide helpful context about how the Chromium build works, and its prerequisites:

- Tools for Chromium version information: https://omahaproxy.appspot.com/
- https://www.chromium.org/developers/how-tos/get-the-code/working-with-release-branches
- https://chromium.googlesource.com/chromium/src/+/HEAD/docs/windows_build_instructions.md
- https://chromium.googlesource.com/chromium/src/+/HEAD/docs/mac_build_instructions.md
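The remote-debugging check described in the README hunk above (`http://localhost:9221`) can also be done from a shell rather than a browser. A minimal sketch, assuming the headless build is running and its debugging port has been forwarded to 9221 as the README describes:

```sh
# Query the Chromium DevTools HTTP endpoint on the forwarded debugging port.
# A JSON reply containing "Browser" and "webSocketDebuggerUrl" fields means
# headless_shell is up and remote debugging is reachable.
curl -s http://localhost:9221/json/version
```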
49 changes: 29 additions & 20 deletions x-pack/build_chromium/build.py
@@ -3,21 +3,18 @@
from build_util import (
runcmd,
runcmdsilent,
mkdir,
md5_file,
configure_environment,
)

# This file builds Chromium headless on Windows, Mac, and Linux.

# Verify that we have an argument, and if not print instructions
if (len(sys.argv) < 2):
print('Usage:')
print('python build.py {chromium_version} [arch_name]')
print('python build.py {chromium_version} {arch_name}')
print('Example:')
print('python build.py 68.0.3440.106')
print('python build.py 4747cc23ae334a57a35ed3c8e6adcdbc8a50d479')
print('python build.py 4747cc23ae334a57a35ed3c8e6adcdbc8a50d479 arm64 # build for ARM architecture')
print('python build.py 4747cc23ae334a57a35ed3c8e6adcdbc8a50d479 x64')
print('python build.py 4747cc23ae334a57a35ed3c8e6adcdbc8a50d479 arm64 # cross-compile for ARM architecture')
print
sys.exit(1)

@@ -57,25 +54,34 @@
print('Creating a new branch for tracking the source version')
runcmd('git checkout -b build-' + base_version + ' ' + source_version)

# configure environment: environment path
depot_tools_path = os.path.join(build_path, 'depot_tools')
path_value = depot_tools_path + os.pathsep + os.environ['PATH']
print('Updating PATH for depot_tools: ' + path_value)
os.environ['PATH'] = path_value
full_path = depot_tools_path + os.pathsep + os.environ['PATH']
print('Updating PATH for depot_tools: ' + full_path)
os.environ['PATH'] = full_path

# configure environment: build dependencies
if platform.system() == 'Linux':
if arch_name:
print('Running sysroot install script...')
runcmd(src_path + '/build/linux/sysroot_scripts/install-sysroot.py --arch=' + arch_name)
print('Running install-build-deps...')
runcmd(src_path + '/build/install-build-deps.sh')


print('Updating all modules')
runcmd('gclient sync')
runcmd('gclient sync -D')

# Copy build args/{Linux | Darwin | Windows}.gn from the root of our directory to out/headless/args.gn,
argsgn_destination = path.abspath('out/headless/args.gn')
print('Generating platform-specific args')
mkdir('out/headless')
print(' > cp ' + argsgn_file + ' ' + argsgn_destination)
shutil.copyfile(argsgn_file, argsgn_destination)
print('Setting up build directory')
runcmd('rm -rf out/headless')
runcmd('mkdir out/headless')

# Copy build args/{Linux | Darwin | Windows}.gn from the root of our directory to out/headless/args.gn,
# add the target_cpu for cross-compilation
print('Adding target_cpu to args')

f = open('out/headless/args.gn', 'a')
f.write('\rtarget_cpu = "' + arch_name + '"\r')
f.close()
argsgn_file_out = path.abspath('out/headless/args.gn')
runcmd('cp ' + argsgn_file + ' ' + argsgn_file_out)
runcmd('echo \'target_cpu="' + arch_name + '"\' >> ' + argsgn_file_out)

runcmd('gn gen out/headless')

@@ -136,3 +142,6 @@ def archive_file(name):
print('Creating ' + path.join(src_path, md5_filename))
with open (md5_filename, 'w') as f:
f.write(md5_file(zip_filename))

runcmd('gsutil cp ' + path.join(src_path, zip_filename) + ' gs://headless_shell_staging')
runcmd('gsutil cp ' + path.join(src_path, md5_filename) + ' gs://headless_shell_staging')
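The args.gn handling added in the build.py diff above amounts to a handful of shell steps. A rough sketch for an arm64 cross-build; the platform args file path is an assumption based on the comment about `args/{Linux | Darwin | Windows}.gn`:

```sh
# Recreate the build output directory
rm -rf out/headless && mkdir out/headless

# Copy the platform-specific base args, then append the cross-compilation
# target so gn picks it up when generating the build files
cp ./build_chromium/linux/args.gn out/headless/args.gn
echo 'target_cpu="arm64"' >> out/headless/args.gn

# Regenerate the ninja build files with the updated args
gn gen out/headless
```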
16 changes: 0 additions & 16 deletions x-pack/build_chromium/build_util.py
@@ -27,19 +27,3 @@ def md5_file(filename):
for chunk in iter(lambda: f.read(128 * md5.block_size), b''):
md5.update(chunk)
return md5.hexdigest()

def configure_environment(arch_name, build_path, src_path):
"""Runs install scripts for deps, and configures temporary environment variables required by Chromium's build"""

if platform.system() == 'Linux':
if arch_name:
print('Running sysroot install script...')
sysroot_cmd = src_path + '/build/linux/sysroot_scripts/install-sysroot.py --arch=' + arch_name
runcmd(sysroot_cmd)
print('Running install-build-deps...')
runcmd(src_path + '/build/install-build-deps.sh')

depot_tools_path = os.path.join(build_path, 'depot_tools')
full_path = depot_tools_path + os.pathsep + os.environ['PATH']
print('Updating PATH for depot_tools: ' + full_path)
os.environ['PATH'] = full_path
