[Reporting] Update Puppeteer to v7.1.0 #98096

Closed
3 changes: 1 addition & 2 deletions package.json
@@ -310,7 +310,7 @@
"proxy-from-env": "1.0.0",
"proxyquire": "1.8.0",
"puid": "1.0.7",
"puppeteer": "npm:@elastic/[email protected]",
"puppeteer": "7.1.0",
"query-string": "^6.13.2",
"raw-loader": "^3.1.0",
"rbush": "^3.0.1",
@@ -580,7 +580,6 @@
"@types/pretty-ms": "^5.0.0",
"@types/prop-types": "^15.7.3",
"@types/proper-lockfile": "^3.0.1",
"@types/puppeteer": "^5.4.1",
"@types/rbush": "^3.0.0",
"@types/reach__router": "^1.2.6",
"@types/react": "^16.9.36",
13 changes: 9 additions & 4 deletions src/dev/chromium_version.ts
@@ -35,8 +35,13 @@ async function getPuppeteerRelease(log: ToolingLog): Promise<PuppeteerRelease> {
'Could not get the Puppeteer version! Check node_modules/puppteer/package.json'
);
}
- log.info(`Kibana is using Puppeteer ${version} (${forkCompatibilityMap[version]})`);
- return forkCompatibilityMap[version];
+ let puppeteerRelease = forkCompatibilityMap[version];
+ if (puppeteerRelease == null) {
+   puppeteerRelease = version;
+ }
+
+ log.info(`Kibana is using Puppeteer ${version} (${puppeteerRelease})`);
+ return puppeteerRelease;
}

async function getChromiumRevision(
@@ -129,8 +134,8 @@ run(
description: chalk`
Display the Chromium git commit that correlates to a given Puppeteer release.

- node x-pack/dev-tools/chromium_version 5.5.0 {dim # gets the Chromium commit for Puppeteer v5.5.0}
- node x-pack/dev-tools/chromium_version {dim # gets the Chromium commit for the Kibana dependency version of Puppeteer}
+ node scripts/chromium_version 5.5.0 {dim # gets the Chromium commit for Puppeteer v5.5.0}
+ node scripts/chromium_version {dim # gets the Chromium commit for the Kibana dependency version of Puppeteer}

You can use https://omahaproxy.appspot.com/ to look up the Chromium release that first shipped with that commit.
`,
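For context on the `getPuppeteerRelease` hunk above: a version found in the fork-compatibility map is translated to the upstream release it tracks, and any other version (such as the stock `7.1.0` now declared in `package.json`) falls through unchanged. A minimal sketch of that behavior follows; the map entry is illustrative, not the real table.

```ts
// Sketch only; the real map and function live in src/dev/chromium_version.ts.
const forkCompatibilityMap: Record<string, string> = {
  '5.4.1-patch.1': '5.4.1', // hypothetical fork build and the upstream release it tracks
};

function resolvePuppeteerRelease(version: string): string {
  // A stock npm version (e.g. '7.1.0') has no map entry and is returned as-is.
  return forkCompatibilityMap[version] ?? version;
}

// resolvePuppeteerRelease('5.4.1-patch.1') === '5.4.1'
// resolvePuppeteerRelease('7.1.0') === '7.1.0'
```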
@@ -15,14 +15,14 @@ const log = new ToolingLog({
});

describe(`enumeratePatterns`, () => {
- it(`should resolve x-pack/plugins/reporting/server/browsers/extract/unzip.js to kibana-reporting`, () => {
+ it(`should resolve x-pack/plugins/reporting/server/browsers/extract/unzip.ts to kibana-reporting`, () => {
const actual = enumeratePatterns(REPO_ROOT)(log)(
new Map([['x-pack/plugins/reporting', ['kibana-reporting']]])
);

expect(
actual[0].includes(
- 'x-pack/plugins/reporting/server/browsers/extract/unzip.js kibana-reporting'
+ 'x-pack/plugins/reporting/server/browsers/extract/unzip.ts kibana-reporting'
)
).toBe(true);
});
@@ -148,10 +148,10 @@ x-pack/plugins/reporting/server/browsers/download/download.ts kibana-reporting
x-pack/plugins/reporting/server/browsers/download/ensure_downloaded.ts kibana-reporting
x-pack/plugins/reporting/server/browsers/download/index.ts kibana-reporting
x-pack/plugins/reporting/server/browsers/download/util.ts kibana-reporting
- x-pack/plugins/reporting/server/browsers/extract/extract.js kibana-reporting
- x-pack/plugins/reporting/server/browsers/extract/extract_error.js kibana-reporting
- x-pack/plugins/reporting/server/browsers/extract/index.js kibana-reporting
- x-pack/plugins/reporting/server/browsers/extract/unzip.js kibana-reporting
+ x-pack/plugins/reporting/server/browsers/extract/extract.ts kibana-reporting
+ x-pack/plugins/reporting/server/browsers/extract/extract_error.ts kibana-reporting
+ x-pack/plugins/reporting/server/browsers/extract/index.ts kibana-reporting
+ x-pack/plugins/reporting/server/browsers/extract/unzip.ts kibana-reporting
x-pack/plugins/reporting/server/browsers/index.ts kibana-reporting
x-pack/plugins/reporting/server/browsers/install.ts kibana-reporting
x-pack/plugins/reporting/server/browsers/network_policy.test.ts kibana-reporting
@@ -32,27 +32,27 @@ describe(`Transform fns`, () => {
it(`should remove the jenkins workspace path`, () => {
const obj = {
staticSiteUrl:
- '/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana/x-pack/plugins/reporting/server/browsers/extract/unzip.js',
+ '/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana/x-pack/plugins/reporting/server/browsers/extract/unzip.ts',
COVERAGE_INGESTION_KIBANA_ROOT:
'/var/lib/jenkins/workspace/elastic+kibana+code-coverage/kibana',
};
expect(coveredFilePath(obj)).toHaveProperty(
'coveredFilePath',
- 'x-pack/plugins/reporting/server/browsers/extract/unzip.js'
+ 'x-pack/plugins/reporting/server/browsers/extract/unzip.ts'
);
});
});
describe(`in the qa research job`, () => {
it(`should remove the jenkins workspace path`, () => {
const obj = {
staticSiteUrl:
- '/var/lib/jenkins/workspace/elastic+kibana+qa-research/kibana/x-pack/plugins/reporting/server/browsers/extract/unzip.js',
+ '/var/lib/jenkins/workspace/elastic+kibana+qa-research/kibana/x-pack/plugins/reporting/server/browsers/extract/unzip.ts',
COVERAGE_INGESTION_KIBANA_ROOT:
'/var/lib/jenkins/workspace/elastic+kibana+qa-research/kibana',
};
expect(coveredFilePath(obj)).toHaveProperty(
'coveredFilePath',
- 'x-pack/plugins/reporting/server/browsers/extract/unzip.js'
+ 'x-pack/plugins/reporting/server/browsers/extract/unzip.ts'
);
});
});
@@ -82,7 +82,7 @@ describe(`Transform fns`, () => {
describe(`teamAssignment`, () => {
const teamAssignmentsPathMOCK =
'src/dev/code_coverage/ingest_coverage/__tests__/mocks/team_assign_mock.txt';
- const coveredFilePath = 'x-pack/plugins/reporting/server/browsers/extract/unzip.js';
+ const coveredFilePath = 'x-pack/plugins/reporting/server/browsers/extract/unzip.ts';
const obj = { coveredFilePath };
const log = new ToolingLog({
level: 'info',
27 changes: 18 additions & 9 deletions x-pack/build_chromium/README.md
@@ -12,8 +12,8 @@ which is where we have two machines provisioned for the Linux and Windows
builds. Mac builds can be achieved locally, and are a great place to start to
gain familiarity.

- **NOTE:** Linux builds should be done in Ubuntu on x86 architecture. ARM builds
- are created in x86. CentOS is not supported for building Chromium.
+ **NOTE:** Linux builds should be done in Ubuntu on x64 architecture. ARM builds
+ are cross-compiled on x64. CentOS is not supported for building Chromium.

1. Login to our GCP instance [here using your okta credentials](https://console.cloud.google.com/).
2. Click the "Compute Engine" tab.
@@ -27,25 +27,33 @@ are created in x86. CentOS is not supported for building Chromium.
- python2 (`python` must link to `python2`)
- lsb_release
- tmux is recommended in case your ssh session is interrupted
- 6. Copy the entire `build_chromium` directory into a GCP storage bucket, so you can copy the scripts into the instance and run them.
+ 6. Copy the entire `build_chromium` directory into a GCP storage bucket. To do this, use `gsutil rsync`:
+
+ ```sh
+ # This shows a preview of what would change by synchronizing the source scripts with the destination GCS bucket.
+ # Remove the `-n` flag to enact the changes
+ gsutil rsync -n -r x-pack/build_chromium gs://tsullivan-build_chromium/build_chromium
+ ```

## Build Script Usage

- ```
+ These commands show how to set up an environment to build:
+
+ ```sh
# Allow our scripts to use depot_tools commands
export PATH=$HOME/chromium/depot_tools:$PATH

# Create a dedicated working directory for this directory of Python scripts.
mkdir ~/chromium && cd ~/chromium

- # Copy the scripts from the Kibana repo to use them conveniently in the working directory
- gsutil cp -r gs://my-bucket/build_chromium .
+ # Copy the scripts from the Kibana team's GCS bucket
+ gsutil cp -r gs://tsullivan-build_chromium/build_chromium .

# Install the OS packages, configure the environment, download the chromium source (25GB)
- python ./build_chromium/init.sh [arch_name]
+ python ./build_chromium/init.py [arch_name]

# Run the build script with the path to the chromium src directory, the git commit hash
- python ./build_chromium/build.py <commit_id> x86
+ python ./build_chromium/build.py <commit_id> x64

# OR You can build for ARM
python ./build_chromium/build.py <commit_id> arm64
@@ -107,7 +115,8 @@ use the Kibana `build.py` script (in this directory).

It's recommended that you create a working directory for the chromium source
code and all the build tools, and run the commands from there:
- ```
+
+ ```sh
mkdir ~/chromium && cd ~/chromium
cp -r ~/path/to/kibana/x-pack/build_chromium .
python ./build_chromium/init.sh [arch_name]
7 changes: 6 additions & 1 deletion x-pack/build_chromium/build.py
@@ -4,8 +4,8 @@
runcmd,
runcmdsilent,
mkdir,
- md5_file,
configure_environment,
+ md5_file,
)

# This file builds Chromium headless on Windows, Mac, and Linux.
@@ -57,6 +57,8 @@
print('Creating a new branch for tracking the source version')
runcmd('git checkout -b build-' + base_version + ' ' + source_version)

+ configure_environment(arch_name, build_path, src_path)

depot_tools_path = os.path.join(build_path, 'depot_tools')
path_value = depot_tools_path + os.pathsep + os.environ['PATH']
print('Updating PATH for depot_tools: ' + path_value)
@@ -136,3 +138,6 @@ def archive_file(name):
print('Creating ' + path.join(src_path, md5_filename))
with open (md5_filename, 'w') as f:
f.write(md5_file(zip_filename))

+ # TODO Upload to GCS
+ # runcmd('gsutil cp -r output.zip gs://tsullivan-build_chromium/build_chromium')
5 changes: 1 addition & 4 deletions x-pack/build_chromium/init.py
@@ -1,6 +1,6 @@
import os, platform, sys
from os import path
- from build_util import runcmd, mkdir, md5_file, configure_environment
+ from build_util import runcmd, mkdir

# This is a cross-platform initialization script which should only be run
# once per environment, and isn't intended to be run directly. You should
@@ -44,6 +44,3 @@
runcmd('fetch chromium --nohooks=1 --no-history=1')
else:
print('Directory exists: ' + chromium_dir + '. Skipping chromium fetch.')

- # This depends on having the chromium/src directory with the complete checkout
- configure_environment(arch_name, build_path, src_path)
6 changes: 5 additions & 1 deletion x-pack/build_chromium/linux/args.gn
@@ -1,8 +1,12 @@
import("//build/args/headless.gn")

is_debug = false
symbol_level = 0
is_component_build = false
remove_webcore_debug_symbols = true
enable_nacl = false

+ # Our build will fail without this
+ headless_use_embedded_resources = false
+
+ # Please, consult @elastic/kibana-security before changing/removing this option.
+ use_kerberos = false
@@ -8,7 +8,7 @@
import { i18n } from '@kbn/i18n';
import { map, truncate } from 'lodash';
import open from 'opn';
- import { ElementHandle, EvaluateFn, Page, Response, SerializableOrJSHandle } from 'puppeteer';
+ import puppeteer, { ElementHandle, EvaluateFn, SerializableOrJSHandle } from 'puppeteer';
import { parse as parseUrl } from 'url';
import { getDisallowedOutgoingUrlError } from '../';
import { ConditionalHeaders, ConditionalHeadersConditions } from '../../../export_types/common';
@@ -53,14 +53,14 @@ interface InterceptedRequest {
const WAIT_FOR_DELAY_MS: number = 100;

export class HeadlessChromiumDriver {
- private readonly page: Page;
+ private readonly page: puppeteer.Page;
private readonly inspect: boolean;
private readonly networkPolicy: NetworkPolicy;

private listenersAttached = false;
private interceptedCount = 0;

- constructor(page: Page, { inspect, networkPolicy }: ChromiumDriverOptions) {
+ constructor(page: puppeteer.Page, { inspect, networkPolicy }: ChromiumDriverOptions) {
this.page = page;
this.inspect = inspect;
this.networkPolicy = networkPolicy;
@@ -127,7 +127,7 @@ export class HeadlessChromiumDriver {
/*
* Call Page.screenshot and return a base64-encoded string of the image
*/
- public async screenshot(elementPosition: ElementPosition): Promise<string> {
+ public async screenshot(elementPosition: ElementPosition): Promise<string | void> {
const { boundingClientRect, scroll } = elementPosition;
const screenshot = await this.page.screenshot({
clip: {
@@ -138,7 +138,10 @@
},
});

- return screenshot.toString('base64');
+ if (screenshot) {
+   return screenshot.toString('base64');
+ }
+ return screenshot;
}

public async evaluate(
@@ -160,6 +163,11 @@
const { timeout } = opts;
logger.debug(`waitForSelector ${selector}`);
const resp = await this.page.waitForSelector(selector, { timeout }); // override default 30000ms

+ if (!resp) {
+   throw new Error(`Failure in waitForSelector: void response! Context: ${context.context}`);
+ }

logger.debug(`waitForSelector ${selector} resolved`);
return resp;
}
@@ -219,6 +227,7 @@
}

// @ts-ignore
+ // FIXME: use `await page.target().createCDPSession();`
const client = this.page._client;

// We have to reach into the Chrome Devtools Protocol to apply headers as using
@@ -293,7 +302,7 @@
// Even though 3xx redirects go through our request
// handler, we should probably inspect responses just to
// avoid being bamboozled by some malicious request
- this.page.on('response', (interceptedResponse: Response) => {
+ this.page.on('response', (interceptedResponse: puppeteer.HTTPResponse) => {
const interceptedUrl = interceptedResponse.url();
const allowed = !interceptedUrl.startsWith('file://');

@@ -315,17 +324,16 @@

private async launchDebugger() {
// In order to pause on execution we have to reach more deeply into Chromiums Devtools Protocol,
- // and more specifically, for the page being used. _client is per-page, and puppeteer doesn't expose
- // a page's client in their api, so we have to reach into internals to get this behavior.
- // Finally, in order to get the inspector running, we have to know the page's internal ID (again, private)
+ // and more specifically, for the page being used. _client is per-page.
+ // In order to get the inspector running, we have to know the page's internal ID (again, private)
// in order to construct the final debugging URL.

- // @ts-ignore
- await this.page._client.send('Debugger.enable');
- // @ts-ignore
- await this.page._client.send('Debugger.pause');
- // @ts-ignore
- const targetId = this.page._target._targetId;
+ const target = this.page.target();
+ const client = await target.createCDPSession();
+
+ await client.send('Debugger.enable');
+ await client.send('Debugger.pause');
+ const targetId = target._targetId;
const wsEndpoint = this.page.browser().wsEndpoint();
const { port } = parseUrl(wsEndpoint);

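The FIXME and the rewritten `launchDebugger` above point at the same migration: obtain a per-page CDP session through Puppeteer's public API instead of the private `page._client`. A minimal sketch of that pattern, using an assumed header-setting helper that is not part of this diff:

```ts
import puppeteer from 'puppeteer';

// Sketch only (assumed usage, not the PR's implementation): send CDP commands
// through a session created from the public API rather than page._client.
async function applyExtraHeaders(page: puppeteer.Page, headers: Record<string, string>) {
  const client = await page.target().createCDPSession();
  await client.send('Network.enable');
  await client.send('Network.setExtraHTTPHeaders', { headers });
}
```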
@@ -46,6 +46,8 @@ export const args = ({ userDataDir, viewport, disableSandbox, proxy: proxyConfig
// The viewport may later need to be resized depending on the position of the clip area.
// These numbers come from the job parameters, so this is a close guess.
`--window-size=${Math.floor(viewport.width)},${Math.floor(viewport.height)}`,
+ // allow screenshot capturing to work outside of the viewport
+ `--mainFrameClipsContent=false`,
];

if (proxyConfig.enabled) {
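For reference, the argument list built above is ultimately handed to `puppeteer.launch` by the driver factory. A short sketch of that wiring with placeholder values; the window size and flag set here are illustrative, not the factory's actual configuration:

```ts
import puppeteer from 'puppeteer';

// Sketch only: launch headless Chromium with the kind of args the helper above produces.
async function launchBrowser() {
  return puppeteer.launch({
    headless: true,
    args: [
      '--window-size=1950,1200',
      // allow screenshot capturing to work outside of the viewport
      '--mainFrameClipsContent=false',
    ],
  });
}
```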
@@ -89,7 +89,7 @@ export class HeadlessChromiumDriverFactory {
const versionInfo = await client.send('Browser.getVersion');
logger.debug(`Browser version: ${JSON.stringify(versionInfo)}`);

- await page.emulateTimezone(browserTimezone ?? null);
+ await page.emulateTimezone(browserTimezone);

// Set the default timeout for all navigation methods to the openUrl timeout (30 seconds)
// All waitFor methods have their own timeout config passed in to them
@@ -173,7 +173,7 @@
})
);

- const pageRequestFailed$ = Rx.fromEvent<puppeteer.Request>(page, 'requestfailed').pipe(
+ const pageRequestFailed$ = Rx.fromEvent<puppeteer.HTTPRequest>(page, 'requestfailed').pipe(
map((req) => {
const failure = req.failure && req.failure();
if (failure) {
@@ -193,6 +193,10 @@
// Puppeteer doesn't give a handle to the original ChildProcess object
// See https://github.com/GoogleChrome/puppeteer/issues/1292#issuecomment-521470627

+ if (childProcess == null) {
+   throw new Error('TypeError! ChildProcess is null!');
+ }

// just log closing of the process
const processClose$ = Rx.fromEvent<void>(childProcess, 'close').pipe(
tap(() => {
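The new guard reflects that `browser.process()` is nullable in Puppeteer 7 (for example, when attached to a remote browser there is no local child process). A stripped-down sketch of the guard-then-observe pattern used above; the function name and logging are illustrative:

```ts
import * as Rx from 'rxjs';
import { tap } from 'rxjs/operators';
import puppeteer from 'puppeteer';

// Sketch only: guard the nullable child process before wiring up exit events.
function observeProcessClose(browser: puppeteer.Browser): Rx.Observable<void> {
  const childProcess = browser.process();
  if (childProcess == null) {
    throw new Error('TypeError! ChildProcess is null!');
  }
  return Rx.fromEvent<void>(childProcess, 'close').pipe(
    tap(() => {
      // the real factory just logs the close event here
    })
  );
}
```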