diff --git a/package.json b/package.json
index ce62a3dfe3b5..09b5f8886a50 100644
--- a/package.json
+++ b/package.json
@@ -880,7 +880,7 @@
     "prop-types": "^15.8.1",
     "proxy-from-env": "1.0.0",
     "puid": "1.0.7",
-    "puppeteer": "19.7.2",
+    "puppeteer": "20.1.0",
     "query-string": "^6.13.2",
     "rbush": "^3.0.1",
     "re-resizable": "^6.1.1",
diff --git a/src/dev/build/tasks/install_chromium.js b/src/dev/build/tasks/install_chromium.ts
similarity index 89%
rename from src/dev/build/tasks/install_chromium.js
rename to src/dev/build/tasks/install_chromium.ts
index 23e7ba6b5998..968f9f5b474a 100644
--- a/src/dev/build/tasks/install_chromium.js
+++ b/src/dev/build/tasks/install_chromium.ts
@@ -6,9 +6,11 @@
  * Side Public License, v 1.
  */
+import type { Logger } from '@kbn/core/server';
 import { install, paths } from '@kbn/screenshotting-plugin/server/utils';
+import type { Task } from '../lib';
-export const InstallChromium = {
+export const InstallChromium: Task = {
   description: 'Installing Chromium',
   async run(config, log, build) {
@@ -43,7 +45,7 @@ export const InstallChromium = {
         platform,
         'node_modules/@kbn/screenshotting-plugin/chromium'
       );
-      await install(logger, pkg, path);
+      await install(logger as unknown as Logger, pkg, path);
     }
   },
 };
diff --git a/src/dev/chromium_version.ts b/src/dev/chromium_version/index.ts
similarity index 57%
rename from src/dev/chromium_version.ts
rename to src/dev/chromium_version/index.ts
index 22e5bfdc2b94..a02f600daad7 100644
--- a/src/dev/chromium_version.ts
+++ b/src/dev/chromium_version/index.ts
@@ -7,30 +7,31 @@
  */
 import { run } from '@kbn/dev-cli-runner';
-import { ToolingLog } from '@kbn/tooling-log';
 import { REPO_ROOT } from '@kbn/repo-info';
+import { ToolingLog } from '@kbn/tooling-log';
 import chalk from 'chalk';
-import cheerio from 'cheerio';
 import fs from 'fs';
 import fetch from 'node-fetch';
 import path from 'path';
-
-type PuppeteerRelease = string;
-type ChromiumRevision = string;
-type ChromiumCommit = string;
-
-// We forked the Puppeteer node module for Kibana,
-// So we need to translate OUR version to the official Puppeteer Release
-const forkCompatibilityMap: Record<string, string> = {
-  '5.4.1-patch.1': '5.4.1',
-};
+import {
+  type ChromeVersion,
+  type ChromiumCommit,
+  type ChromiumDashVersionType,
+  ChromiumDashVersionSchema,
+  forkCompatibilityMap,
+  PuppeteerPackageSchema,
+  type PuppeteerPackageType,
+  type PuppeteerRelease,
+} from './util';
 async function getPuppeteerRelease(log: ToolingLog): Promise<PuppeteerRelease> {
   // open node_modules/puppeteer/package.json
-  const puppeteerPackageJson = JSON.parse(
+  const { version }: PuppeteerPackageType = JSON.parse(
     fs.readFileSync(path.resolve(REPO_ROOT, 'node_modules', 'puppeteer', 'package.json'), 'utf8')
   );
-  const { version } = puppeteerPackageJson;
+
+  PuppeteerPackageSchema.validate({ version });
+
   if (version == null) {
     throw new Error(
       'Could not get the Puppeteer version! Check node_modules/puppteer/package.json'
@@ -42,10 +43,10 @@ async function getPuppeteerRelease(log: ToolingLog): Promise<PuppeteerRelease> {
   return puppeteerRelease;
 }
-async function getChromiumRevision(
+async function getChromeVersion(
   kibanaPuppeteerVersion: PuppeteerRelease,
   log: ToolingLog
-): Promise<ChromiumRevision> {
+): Promise<ChromeVersion> {
   const url = `https://raw.githubusercontent.com/puppeteer/puppeteer/puppeteer-v${kibanaPuppeteerVersion}/packages/puppeteer-core/src/revisions.ts`;
   let body: string;
   try {
@@ -57,58 +58,51 @@ async function getChromiumRevision(
     throw new Error(`Could not fetch ${url}. 
Check the URL in a browser and try again.`);
   }
-  let revision: ChromiumRevision | undefined;
+  let version: ChromeVersion | undefined;
   const lines = body.split('\n');
   let cursor = lines.length;
   while (--cursor >= 0) {
-    // look for the line of code matching ` chromium: '0123456',`
-    const test = lines[cursor].match(/^\s+chromium: '(\S+)',$/);
+    // look for the line of code matching ` chrome: '113.0.5672.63',`
+    const test = lines[cursor].match(/^\s+chrome: '(\S+)',$/);
     if (test != null) {
-      log.debug(`Parsed revision from source text: \`${lines[cursor]}\``);
-      [, revision] = test;
+      log.debug(`Parsed Chrome version from source text: \`${lines[cursor]}\``);
+      [, version] = test;
       break;
     }
   }
-  if (revision == null) {
+  if (version == null) {
     throw new Error(
-      `Could not find a Chromium revision listed in Puppeteer source! Check ${url} in a browser`
+      `Could not find a Chrome version listed in Puppeteer source! Check ${url} in a browser`
     );
   }
-  log.info(`Found Chromium revision ${revision} from Puppeteer ${kibanaPuppeteerVersion}`);
-  return revision;
+  log.info(`Found Chrome version ${version} from Puppeteer ${kibanaPuppeteerVersion}`);
+  return version;
 }
-async function getChromiumCommit(
-  revision: ChromiumRevision,
-  log: ToolingLog
-): Promise<ChromiumCommit> {
-  const url = `https://crrev.com/${revision}`;
+async function getChromiumCommit(version: ChromeVersion, log: ToolingLog): Promise<ChromiumCommit> {
+  const url = `https://chromiumdash.appspot.com/fetch_version?version=${version}`;
   log.info(`Fetching ${url}`);
-  const pageText = await fetch(url);
-  const $ = cheerio.load(await pageText.text());
-
-  // get the commit from the page title
-  let commit: ChromiumCommit | null = null;
-  const matches = $('title')
-    .text()
-    .match(/\S{40}/);
-  if (matches != null) {
-    log.debug(`Parsed commit hash from page title: \`${$('title').text()}\``);
-    [commit] = matches;
-  }
+  const fetchResponse = await fetch(url);
+  const chromeJson: ChromiumDashVersionType = await fetchResponse.json();
+
+  const {
+    chromium_main_branch_position: revision,
+    hashes: { chromium: commit },
+  } = chromeJson;
+
+  ChromiumDashVersionSchema.validate({
+    chromium_main_branch_position: revision,
+    hashes: { chromium: commit },
+  });
   if (commit == null) {
     throw new Error(`Could not find a Chromium commit! Check ${url} in a browser.`);
   }
-  const baseUrl = 'https://commondatastorage.googleapis.com/chromium-browser-snapshots';
-
-  log.info(`Found Chromium commit ${commit} from revision ${revision}.`);
-  log.info(`Mac x64 download: ${baseUrl}/Mac/${revision}/chrome-mac.zip`);
-  log.info(`Mac ARM download: ${baseUrl}/Mac_Arm/${revision}/chrome-mac.zip`);
-  log.info(`Windows x64 download: ${baseUrl}/Win/${revision}/chrome-win.zip`);
+  log.info(`Found Chromium revision ${revision} from version ${version}.`);
+  log.info(`Found Chromium commit ${commit} from revision ${revision}.`);
   return commit;
 }
@@ -127,8 +121,8 @@ run(
         puppeteerVersion = await getPuppeteerRelease(log);
       }
-      const chromiumRevision = await getChromiumRevision(puppeteerVersion, log);
-      await getChromiumCommit(chromiumRevision, log);
+      const chromeVersion = await getChromeVersion(puppeteerVersion, log);
+      await getChromiumCommit(chromeVersion, log);
     } catch (err) {
       log.error(err);
     }
diff --git a/src/dev/chromium_version/util.ts b/src/dev/chromium_version/util.ts
new file mode 100644
index 000000000000..83cf5abff246
--- /dev/null
+++ b/src/dev/chromium_version/util.ts
@@ -0,0 +1,33 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+import { schema, TypeOf } from '@kbn/config-schema';
+
+export type PuppeteerRelease = string;
+export type ChromeVersion = string;
+export type ChromiumCommit = string;
+
+export type PuppeteerPackageType = TypeOf<typeof PuppeteerPackageSchema>;
+export type ChromiumDashVersionType = TypeOf<typeof ChromiumDashVersionSchema>;
+
+export const PuppeteerPackageSchema = schema.object({
+  version: schema.string(),
+});
+
+export const ChromiumDashVersionSchema = schema.object({
+  chromium_main_branch_position: schema.number(),
+  hashes: schema.object({
+    chromium: schema.string({ minLength: 40, maxLength: 40 }),
+  }),
+});
+
+// We forked the Puppeteer node module for Kibana,
+// So we need to translate OUR version to the official Puppeteer Release
+export const forkCompatibilityMap: Record<string, string> = {
+  '5.4.1-patch.1': '5.4.1',
+};
diff --git a/src/dev/tsconfig.json b/src/dev/tsconfig.json
index b4fb983d63e5..73d4dfab3b73 100644
--- a/src/dev/tsconfig.json
+++ b/src/dev/tsconfig.json
@@ -39,5 +39,6 @@
     "@kbn/get-repo-files",
     "@kbn/import-locator",
     "@kbn/journeys",
+    "@kbn/config-schema",
   ]
 }
diff --git a/x-pack/build_chromium/linux/args.gn b/x-pack/build_chromium/linux/args.gn
index 29ec3207c854..01af3bf766f7 100644
--- a/x-pack/build_chromium/linux/args.gn
+++ b/x-pack/build_chromium/linux/args.gn
@@ -24,4 +24,4 @@ enable_vr = false
 use_kerberos = false
 target_os = "linux"
-# target_cpu is added at build timeure a minimal build
+# target_cpu is added at build time
diff --git a/x-pack/plugins/screenshotting/server/browsers/chromium/driver_factory/args.test.ts b/x-pack/plugins/screenshotting/server/browsers/chromium/driver_factory/args.test.ts
index 9d888100ca10..f77b84ccfed2 100644
--- a/x-pack/plugins/screenshotting/server/browsers/chromium/driver_factory/args.test.ts
+++ b/x-pack/plugins/screenshotting/server/browsers/chromium/driver_factory/args.test.ts
@@ -49,7 +49,7 @@ describe('headless webgl arm mac workaround', () => {
     // if you're updating this, then you're likely updating chromium
     // please double-check that the --use-angle flag is still needed for arm macs
-    // if you're updating to v112, then likely instead of --use-angle you should use --enable-gpu
-    expect(getChromiumPackage().binaryChecksum).toBe('0a12a34a0d8bc9c616d3cc339abb167e'); // just putting this here so that someone updating the chromium version will see this comment
+    // instead of --use-angle you may need --enable-gpu
+    expect(getChromiumPackage().binaryChecksum).toBe('4cc4ee072b23e4a65e714ff543eea21b'); // just putting this here so that someone updating the chromium version will see this comment
   });
 });
diff --git a/x-pack/plugins/screenshotting/server/browsers/chromium/driver_factory/args.ts b/x-pack/plugins/screenshotting/server/browsers/chromium/driver_factory/args.ts
index e0f14606c91a..fc1bf51d7749 100644
--- a/x-pack/plugins/screenshotting/server/browsers/chromium/driver_factory/args.ts
+++ b/x-pack/plugins/screenshotting/server/browsers/chromium/driver_factory/args.ts
@@ -75,9 +75,8 @@ export const args = ({
     // Since chromium v111 headless mode in arm based macs is not working with `--disable-gpu`
     // This is a known issue: headless uses swiftshader by default and swiftshader's support for WebGL is currently disabled on Arm pending the resolution of https://issuetracker.google.com/issues/165000222. 
- // As a workaround, we force hardware GL drivers on mac. - // The best way to do this starting with v112 is by passing --enable-gpu, - // v111 and older versions should work with --use-angle. + // As a workaround, we force hardware GL drivers on mac: v111 and older versions should work with --use-angle. + // The best way to do this when the issue is resolved will be to pass --enable-gpu, if (os.arch() === 'arm64' && process.platform === 'darwin') { flags.push('--use-angle'); } else { diff --git a/x-pack/plugins/screenshotting/server/browsers/chromium/driver_factory/index.ts b/x-pack/plugins/screenshotting/server/browsers/chromium/driver_factory/index.ts index 5acc6616236d..53f0fd5d642b 100644 --- a/x-pack/plugins/screenshotting/server/browsers/chromium/driver_factory/index.ts +++ b/x-pack/plugins/screenshotting/server/browsers/chromium/driver_factory/index.ts @@ -166,6 +166,7 @@ export class HeadlessChromiumDriverFactory { env: { TZ: browserTimezone, }, + headless: 'new', }); } catch (err) { observer.error( diff --git a/x-pack/plugins/screenshotting/server/browsers/chromium/integration_tests/downloads.test.ts b/x-pack/plugins/screenshotting/server/browsers/chromium/integration_tests/downloads.test.ts new file mode 100644 index 000000000000..17031c9dc8c7 --- /dev/null +++ b/x-pack/plugins/screenshotting/server/browsers/chromium/integration_tests/downloads.test.ts @@ -0,0 +1,72 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +import { loggingSystemMock } from '@kbn/core-logging-server-mocks'; +import assert from 'assert'; +import axios from 'axios'; +import path from 'path'; +import { PackageInfo } from '..'; +import { paths as chromiumArchivePaths } from '../../../utils'; +import { download } from '../../download'; +import { install } from '../../install'; + +/* eslint-disable no-console */ + +const mockLogger = loggingSystemMock.create().get(); +mockLogger.warn = jest.fn((message: string | Error) => { + console.warn(message); +}); +mockLogger.debug = jest.fn((message: string | Error) => { + console.log(message); +}); +mockLogger.error = jest.fn((message: string | Error) => { + console.error(message); +}); + +/** + * NOTE: these test cases download actual browsers. Running the suite could take + * a long time with a clean cache. 
+ */ + +// test case tuples +const packageInfos = chromiumArchivePaths.packages.map(({ platform, architecture }) => [ + architecture, + platform, +]); + +describe.each(packageInfos)('Chromium archive: %s/%s', (architecture, platform) => { + // For testing, suffix the unzip folder by cpu + platform so the extracted folders do not overwrite each other in the cache + const chromiumPath = path.resolve(__dirname, '../../../../chromium', architecture, platform); + + const originalAxios = axios.defaults.adapter; + beforeAll(async () => { + axios.defaults.adapter = require('axios/lib/adapters/http'); // allow Axios to send actual requests + }); + + afterAll(() => { + axios.defaults.adapter = originalAxios; + }); + + // Allow package definition to be altered to check error handling + const originalPkg = chromiumArchivePaths.packages.find( + (packageInfo) => packageInfo.platform === platform && packageInfo.architecture === architecture + ); + assert(originalPkg); + + let pkg: PackageInfo = originalPkg; + beforeEach(() => { + pkg = { ...originalPkg }; + }); + + it('references the correct checksums and binary path', async () => { + const downloadedChecksum = await download(chromiumArchivePaths, pkg, mockLogger); + expect(downloadedChecksum).toBe(pkg.archiveChecksum); + + const binaryPath = await install(chromiumArchivePaths, mockLogger, pkg, chromiumPath); + expect(binaryPath).toBe(path.join(chromiumPath, pkg.binaryRelativePath)); + }); +}); diff --git a/x-pack/plugins/screenshotting/server/browsers/chromium/paths.ts b/x-pack/plugins/screenshotting/server/browsers/chromium/paths.ts index 9ee669a297db..9556aa453f26 100644 --- a/x-pack/plugins/screenshotting/server/browsers/chromium/paths.ts +++ b/x-pack/plugins/screenshotting/server/browsers/chromium/paths.ts @@ -16,10 +16,9 @@ export interface PackageInfo { binaryRelativePath: string; isPreInstalled: boolean; location: 'custom' | 'common'; + revision: number; } -const REVISION = 1095492; - enum BaseUrl { // see https://www.chromium.org/getting-involved/download-chromium common = 'https://commondatastorage.googleapis.com/chromium-browser-snapshots', @@ -45,9 +44,10 @@ export class ChromiumArchivePaths { platform: 'darwin', architecture: 'x64', archiveFilename: 'chrome-mac.zip', - archiveChecksum: '318ac652b5ba64fb3b37a25e312ffd6e', - binaryChecksum: '107a554a0f7828a1844173cb3830716c', + archiveChecksum: '2ce969500158dd98e3ad4502dbb6b13c', + binaryChecksum: '9960dd00ab27b4e9ee1455692bb65701', binaryRelativePath: 'chrome-mac/Chromium.app/Contents/MacOS/Chromium', + revision: 1121448, location: 'common', archivePath: 'Mac', isPreInstalled: false, @@ -56,9 +56,10 @@ export class ChromiumArchivePaths { platform: 'darwin', architecture: 'arm64', archiveFilename: 'chrome-mac.zip', - archiveChecksum: 'e8f09d0c992d181b986d38a13dfb88c3', - binaryChecksum: '0a12a34a0d8bc9c616d3cc339abb167e', + archiveChecksum: 'a4fea96b155483e0617d909c9b2cb32a', + binaryChecksum: '4cc4ee072b23e4a65e714ff543eea21b', binaryRelativePath: 'chrome-mac/Chromium.app/Contents/MacOS/Chromium', + revision: 1121443, location: 'common', archivePath: 'Mac_Arm', isPreInstalled: false, @@ -66,20 +67,22 @@ export class ChromiumArchivePaths { { platform: 'linux', architecture: 'x64', - archiveFilename: 'chromium-7abd50c-locales-linux_x64.zip', - archiveChecksum: 'dc141a6cae734c29a1144d3d9f8ca7ee', - binaryChecksum: '9b9611ba0c65fc34d1be1e40ae80c036', + archiveFilename: 'chromium-38c3182-locales-linux_x64.zip', + archiveChecksum: '9635c58ccd7a6260dcfc0be7fa1545f6', + binaryChecksum: 
'02b21e91e39eb9aa68bbb4fedfa73204',
       binaryRelativePath: 'headless_shell-linux_x64/headless_shell',
+      revision: 1121455,
       location: 'custom',
       isPreInstalled: true,
     },
     {
       platform: 'linux',
       architecture: 'arm64',
-      archiveFilename: 'chromium-7abd50c-locales-linux_arm64.zip',
-      archiveChecksum: '1ce431a6cd7b3d7e5aa63fc8f7327b0f',
-      binaryChecksum: 'ef21a88efa18f000e6da6d9c51ee2fd7',
+      archiveFilename: 'chromium-38c3182-locales-linux_arm64.zip',
+      archiveChecksum: '96dca82cccea6ae82aaf0bc46104a501',
+      binaryChecksum: '753e07c59b6f269b2f06091155d53f4b',
       binaryRelativePath: 'headless_shell-linux_arm64/headless_shell',
+      revision: 1121455,
       location: 'custom',
       isPreInstalled: true,
     },
@@ -87,9 +90,10 @@ export class ChromiumArchivePaths {
       platform: 'win32',
       architecture: 'x64',
       archiveFilename: 'chrome-win.zip',
-      archiveChecksum: '83e7e89ae749668d3eaa8b3bd6120e8a',
-      binaryChecksum: 'cbfe0d2db3117f13554999bdc7aab68d',
+      archiveChecksum: '76b6c8aa15f0b16df18f793c9953f59f',
+      binaryChecksum: '762fe3b0ffe86d525bb3ed91f870cb7a',
       binaryRelativePath: path.join('chrome-win', 'chrome.exe'),
+      revision: 1121435,
       location: 'common',
       archivePath: 'Win',
       isPreInstalled: true,
@@ -114,7 +118,9 @@ export class ChromiumArchivePaths {
   public getDownloadUrl(p: PackageInfo) {
     if (isCommonPackage(p)) {
-      return `${BaseUrl.common}/${p.archivePath}/${REVISION}/${p.archiveFilename}`;
+      const { common } = BaseUrl;
+      const { archivePath, revision, archiveFilename } = p;
+      return `${common}/${archivePath}/${revision}/${archiveFilename}`;
     }
     return BaseUrl.custom + '/' + p.archiveFilename; // revision is not used for URL if package is a custom build
   }
diff --git a/x-pack/plugins/screenshotting/server/browsers/download/index.ts b/x-pack/plugins/screenshotting/server/browsers/download/index.ts
index b488d1bfe32f..ff8de66df743 100644
--- a/x-pack/plugins/screenshotting/server/browsers/download/index.ts
+++ b/x-pack/plugins/screenshotting/server/browsers/download/index.ts
@@ -12,14 +12,18 @@ import type { ChromiumArchivePaths, PackageInfo } from '../chromium';
 import { md5 } from './checksum';
 import { fetch } from './fetch';
+type ValidChecksum = string;
+
 /**
  * Clears the unexpected files in the browsers archivesPath
  * and ensures that all packages/archives are downloaded and
  * that their checksums match the declared value
- * @param {BrowserSpec} browsers
- * @return {Promise}
 */
-export async function download(paths: ChromiumArchivePaths, pkg: PackageInfo, logger?: Logger) {
+export async function download(
+  paths: ChromiumArchivePaths,
+  pkg: PackageInfo,
+  logger?: Logger
+): Promise<ValidChecksum | undefined> {
   const removedFiles = await del(`${paths.archivesPath}/**/*`, {
     force: true,
     onlyFiles: true,
@@ -41,9 +45,9 @@ export async function download(paths: ChromiumArchivePaths, pkg: PackageInfo, lo
   const pathExists = existsSync(resolvedPath);
   if (pathExists && foundChecksum === archiveChecksum) {
     logger?.debug(
-      `Browser archive for ${pkg.platform}/${pkg.architecture} already found in ${resolvedPath}.`
+      `Browser archive for ${pkg.platform}/${pkg.architecture} already found in ${resolvedPath} with matching checksum.`
     );
-    return;
+    return foundChecksum;
   }
   if (!pathExists) {
@@ -60,11 +64,12 @@ export async function download(paths: ChromiumArchivePaths, pkg: PackageInfo, lo
   }
   const url = paths.getDownloadUrl(pkg);
+  let downloadedChecksum: string | undefined;
   try {
-    const downloadedChecksum = await fetch(url, resolvedPath, logger);
+    downloadedChecksum = await fetch(url, resolvedPath, logger);
     if (downloadedChecksum !== archiveChecksum) {
logger?.warn(
-        `Invalid checksum for ${pkg.platform}/${pkg.architecture}: ` +
+        `Invalid archive checksum for ${pkg.platform}/${pkg.architecture}: ` +
          `expected ${archiveChecksum} got ${downloadedChecksum}`
       );
       invalidChecksums.push(`${url} => ${resolvedPath}`);
@@ -75,7 +80,7 @@
   if (invalidChecksums.length) {
     const error = new Error(
-      `Error downloading browsers, checksums incorrect for:\n - ${invalidChecksums.join(
+      `Error downloading browsers, archive checksums incorrect for:\n - ${invalidChecksums.join(
        '\n - '
      )}`
    );
@@ -83,4 +88,6 @@
     throw error;
   }
+
+  return downloadedChecksum;
 }
diff --git a/x-pack/plugins/screenshotting/server/browsers/install.ts b/x-pack/plugins/screenshotting/server/browsers/install.ts
index 42a7999cab18..351aa875dc48 100644
--- a/x-pack/plugins/screenshotting/server/browsers/install.ts
+++ b/x-pack/plugins/screenshotting/server/browsers/install.ts
@@ -13,6 +13,8 @@ import { download } from './download';
 import { md5 } from './download/checksum';
 import { extract } from './extract';
+type BinaryPath = string;
+
 /**
  * "install" a browser by type into installs path by extracting the downloaded
  * archive. If there is an error extracting the archive an `ExtractError` is thrown
@@ -22,32 +24,48 @@ export async function install(
   logger: Logger,
   pkg: PackageInfo,
   chromiumPath: string = path.resolve(__dirname, '../../chromium')
-): Promise<string> {
+): Promise<BinaryPath> {
+  const { architecture, platform } = pkg;
   const binaryPath = paths.getBinaryPath(pkg, chromiumPath);
   const binaryChecksum = await md5(binaryPath).catch(() => 'MISSING');
-  if (binaryChecksum !== pkg.binaryChecksum) {
-    logger?.warn(
-      `Found browser binary checksum for ${pkg.platform}/${pkg.architecture} in ${binaryPath}` +
-        ` is ${binaryChecksum} but ${pkg.binaryChecksum} was expected. Re-installing...`
+  if (binaryChecksum === pkg.binaryChecksum) {
+    // validated a previously extracted browser binary
+    logger.info(`Browser executable: ${binaryPath}`);
+    return binaryPath;
+  }
+
+  logger?.warn(
+    `Found browser binary checksum for ${platform}/${architecture} in ${binaryPath}` +
+      ` is ${binaryChecksum} but ${pkg.binaryChecksum} was expected. 
Re-installing...` + ); + try { + await del(chromiumPath); + } catch (error) { + logger.error(error); + } + + try { + await download(paths, pkg, logger); + const archive = path.join(paths.archivesPath, architecture, pkg.archiveFilename); + logger.info(`Extracting [${archive}] to [${chromiumPath}]`); + await extract(archive, chromiumPath); + } catch (error) { + logger.error(error); + } + + // check the newly extracted browser binary + const downloadedBinaryChecksum = await md5(binaryPath).catch(() => 'MISSING'); + if (downloadedBinaryChecksum !== pkg.binaryChecksum) { + const error = new Error( + `Error installing browsers, binary checksums incorrect for [${architecture}/${platform}]` ); - try { - await del(chromiumPath); - } catch (error) { - logger.error(error); - } - - try { - await download(paths, pkg, logger); - const archive = path.join(paths.archivesPath, pkg.architecture, pkg.archiveFilename); - logger.info(`Extracting [${archive}] to [${chromiumPath}]`); - await extract(archive, chromiumPath); - } catch (error) { - logger.error(error); - } + logger?.error(error); + + throw error; } - logger.info(`Browser executable: ${binaryPath}`); + logger.info(`Browser executable has been installed: ${binaryPath}`); return binaryPath; } diff --git a/x-pack/plugins/screenshotting/tsconfig.json b/x-pack/plugins/screenshotting/tsconfig.json index b2e1fd6ed184..9110ea16661c 100644 --- a/x-pack/plugins/screenshotting/tsconfig.json +++ b/x-pack/plugins/screenshotting/tsconfig.json @@ -22,6 +22,7 @@ "@kbn/i18n", "@kbn/utils", "@kbn/safer-lodash-set", + "@kbn/core-logging-server-mocks", ], "exclude": [ "target/**/*", diff --git a/yarn.lock b/yarn.lock index ba1403669258..676d904817f0 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6480,6 +6480,20 @@ resolved "https://registry.yarnpkg.com/@protobufjs/utf8/-/utf8-1.1.0.tgz#a777360b5b39a1a2e5106f8e858f2fd2d060c570" integrity sha1-p3c2C1s5oaLlEG+OhY8v0tBgxXA= +"@puppeteer/browsers@1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@puppeteer/browsers/-/browsers-1.0.0.tgz#89de56a718c922857b1d802aac473ebbe1f54d99" + integrity sha512-YKecOIlwH0UsiM9zkKy31DYg11iD8NhOoQ7SQ4oCpwDSd1Ud31WYRoAldbVlVBj9b4hLJIXxn7XSnkH1ta1tpA== + dependencies: + debug "4.3.4" + extract-zip "2.0.1" + https-proxy-agent "5.0.1" + progress "2.0.3" + proxy-from-env "1.1.0" + tar-fs "2.1.1" + unbzip2-stream "1.4.3" + yargs "17.7.1" + "@redux-saga/core@^1.1.3": version "1.1.3" resolved "https://registry.yarnpkg.com/@redux-saga/core/-/core-1.1.3.tgz#3085097b57a4ea8db5528d58673f20ce0950f6a4" @@ -11993,10 +12007,10 @@ chromedriver@^113.0.0: proxy-from-env "^1.1.0" tcp-port-used "^1.0.1" -chromium-bidi@0.4.4: - version "0.4.4" - resolved "https://registry.yarnpkg.com/chromium-bidi/-/chromium-bidi-0.4.4.tgz#44f25d4fa5d2f3debc3fc3948d0657194cac4407" - integrity sha512-4BX5cSaponuvVT1+SbLYTOAgDoVtX/Khoc9UsbFJ/AsPVUeFAM3RiIDFI6XFhLYMi9WmVJqh1ZH+dRpNKkKwiQ== +chromium-bidi@0.4.7: + version "0.4.7" + resolved "https://registry.yarnpkg.com/chromium-bidi/-/chromium-bidi-0.4.7.tgz#4c022c2b0fb1d1c9b571fadf373042160e71d236" + integrity sha512-6+mJuFXwTMU6I3vYLs6IL8A1DyQTPjCfIL971X0aMPVGRbGnNfl6i6Cl0NMbxi2bRYLGESt9T2ZIMRM5PAEcIQ== dependencies: mitt "3.0.0" @@ -12667,10 +12681,10 @@ core-util-is@1.0.2, core-util-is@^1.0.2, core-util-is@~1.0.0: resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= -cosmiconfig@8.0.0: - version "8.0.0" - resolved 
"https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-8.0.0.tgz#e9feae014eab580f858f8a0288f38997a7bebe97" - integrity sha512-da1EafcpH6b/TD8vDRaWV7xFINlHlF6zKsGwS1TsuVJTZRkquaS5HTMq7uq6h31619QjbsYl21gVDOm32KM1vQ== +cosmiconfig@8.1.3: + version "8.1.3" + resolved "https://registry.yarnpkg.com/cosmiconfig/-/cosmiconfig-8.1.3.tgz#0e614a118fcc2d9e5afc2f87d53cd09931015689" + integrity sha512-/UkO2JKI18b5jVMJUp0lvKFMpa/Gye+ZgZjKD+DGEN9y7NRcf/nK1A0sp67ONmKtnDCNMS44E6jrk0Yc3bDuUw== dependencies: import-fresh "^3.2.1" js-yaml "^4.1.0" @@ -13909,10 +13923,10 @@ detective@^5.0.2: defined "^1.0.0" minimist "^1.1.1" -devtools-protocol@0.0.1094867: - version "0.0.1094867" - resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.1094867.tgz#2ab93908e9376bd85d4e0604aa2651258f13e374" - integrity sha512-pmMDBKiRVjh0uKK6CT1WqZmM3hBVSgD+N2MrgyV1uNizAZMw4tx6i/RTc+/uCsKSCmg0xXx7arCP/OFcIwTsiQ== +devtools-protocol@0.0.1120988: + version "0.0.1120988" + resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.1120988.tgz#8fe49088919ae3b8df7235774633763f1f925066" + integrity sha512-39fCpE3Z78IaIPChJsP6Lhmkbf4dWXOmzLk/KFTdRkNk/0JymRIfUynDVRndV9HoDz8PyalK1UH21ST/ivwW5Q== dezalgo@^1.0.0: version "1.0.3" @@ -23756,33 +23770,34 @@ pupa@^2.1.1: dependencies: escape-goat "^2.0.0" -puppeteer-core@19.7.2: - version "19.7.2" - resolved "https://registry.yarnpkg.com/puppeteer-core/-/puppeteer-core-19.7.2.tgz#deee9ef915829b6a1d1a3a008625c29eeb251161" - integrity sha512-PvI+fXqgP0uGJxkyZcX51bnzjFA73MODZOAv0fSD35yR7tvbqwtMV3/Y+hxQ0AMMwzxkEebP6c7po/muqxJvmQ== +puppeteer-core@20.1.0: + version "20.1.0" + resolved "https://registry.yarnpkg.com/puppeteer-core/-/puppeteer-core-20.1.0.tgz#c74e21ad642b0adb273da83b4bf444fdecc6500f" + integrity sha512-/xTvabzAN4mnnuYkJCuWNnnEhOb3JrBTa3sY6qVi1wybuIEk5ODRg8Z5PPiKUGiKC9iG7GWOJ5CjF3iuMuxZSA== dependencies: - chromium-bidi "0.4.4" + "@puppeteer/browsers" "1.0.0" + chromium-bidi "0.4.7" cross-fetch "3.1.5" debug "4.3.4" - devtools-protocol "0.0.1094867" + devtools-protocol "0.0.1120988" extract-zip "2.0.1" https-proxy-agent "5.0.1" proxy-from-env "1.1.0" - rimraf "3.0.2" tar-fs "2.1.1" unbzip2-stream "1.4.3" - ws "8.11.0" + ws "8.13.0" -puppeteer@19.7.2: - version "19.7.2" - resolved "https://registry.yarnpkg.com/puppeteer/-/puppeteer-19.7.2.tgz#1b3ce99a093cc2f8f84dfb06f066d0757ea79d4b" - integrity sha512-4Lm7Qpe/LU95Svirei/jDLDvR5oMrl9BPGd7HMY5+Q28n+BhvKuW97gKkR+1LlI86bO8J3g8rG/Ll5kv9J1nlQ== +puppeteer@20.1.0: + version "20.1.0" + resolved "https://registry.yarnpkg.com/puppeteer/-/puppeteer-20.1.0.tgz#30331e2729b235b3306a39cab3ad5b0cf2b90e7d" + integrity sha512-kZp1eYScK1IpHxkgnDaFSGKKCzt27iZfsxO6Xlv/cklzYrhobxTK9/PxzCacPCrYnxNQwKwHzHLPOCuSyjw1jg== dependencies: - cosmiconfig "8.0.0" + "@puppeteer/browsers" "1.0.0" + cosmiconfig "8.1.3" https-proxy-agent "5.0.1" progress "2.0.3" proxy-from-env "1.1.0" - puppeteer-core "19.7.2" + puppeteer-core "20.1.0" q@^1.5.1: version "1.5.1" @@ -25339,13 +25354,6 @@ rgbcolor@^1.0.1: resolved "https://registry.yarnpkg.com/rgbcolor/-/rgbcolor-1.0.1.tgz#d6505ecdb304a6595da26fa4b43307306775945d" integrity sha1-1lBezbMEplldom+ktDMHMGd1lF0= -rimraf@3.0.2, rimraf@^3.0.0, rimraf@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" - integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== - dependencies: - glob "^7.1.3" - rimraf@^2.2.8, rimraf@^2.5.4, rimraf@^2.6.2, rimraf@^2.6.3: 
version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" @@ -25353,6 +25361,13 @@ rimraf@^2.2.8, rimraf@^2.5.4, rimraf@^2.6.2, rimraf@^2.6.3: dependencies: glob "^7.1.3" +rimraf@^3.0.0, rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + ripemd160@^2.0.0, ripemd160@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.1.tgz#0f4584295c53a3628af7e6d79aca21ce57d1c6e7" @@ -29573,10 +29588,10 @@ write-file-atomic@^4.0.1: imurmurhash "^0.1.4" signal-exit "^3.0.7" -ws@8.11.0: - version "8.11.0" - resolved "https://registry.yarnpkg.com/ws/-/ws-8.11.0.tgz#6a0d36b8edfd9f96d8b25683db2f8d7de6e8e143" - integrity sha512-HPG3wQd9sNQoT9xHyNCXoDUa+Xw/VevmY9FoHyQ+g+rrMn4j6FB4np7Z0OhdTgjx6MgQLK7jwSy1YecU1+4Asg== +ws@8.13.0: + version "8.13.0" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0" + integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA== ws@>=8.11.0, ws@^8.2.3, ws@^8.4.2, ws@^8.9.0: version "8.12.0" @@ -29780,6 +29795,19 @@ yargs@16.2.0, yargs@^16.2.0: y18n "^5.0.5" yargs-parser "^20.2.2" +yargs@17.7.1: + version "17.7.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.7.1.tgz#34a77645201d1a8fc5213ace787c220eabbd0967" + integrity sha512-cwiTb08Xuv5fqF4AovYacTFNxk62th7LKJ6BL9IGUpTJrWoU7/7WdQGTP2SjKf1dUNBGzDd28p/Yfs/GI6JrLw== + dependencies: + cliui "^8.0.1" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.1.1" + yargs@^13.3.0: version "13.3.2" resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd"