diff --git a/x-pack/plugins/ingest_manager/common/types/models/epm.ts b/x-pack/plugins/ingest_manager/common/types/models/epm.ts index d2d1f22dda3a0..ea7fd60d1fa3f 100644 --- a/x-pack/plugins/ingest_manager/common/types/models/epm.ts +++ b/x-pack/plugins/ingest_manager/common/types/models/epm.ts @@ -20,6 +20,7 @@ export enum InstallStatus { } export type InstallType = 'reinstall' | 'reupdate' | 'rollback' | 'update' | 'install'; +export type InstallSource = 'registry' | 'upload'; export type EpmPackageInstallStatus = 'installed' | 'installing'; @@ -49,10 +50,8 @@ export enum AgentAssetType { export type RegistryRelease = 'ga' | 'beta' | 'experimental'; -// from /package/{name} -// type Package struct at https://github.com/elastic/package-registry/blob/master/util/package.go -// https://github.com/elastic/package-registry/blob/master/docs/api/package.json -export interface RegistryPackage { +// Fields common to packages that come from direct upload and the registry +export interface InstallablePackage { name: string; title?: string; version: string; @@ -61,7 +60,6 @@ export interface RegistryPackage { description: string; type: string; categories: string[]; - requirement: RequirementsByServiceName; screenshots?: RegistryImage[]; icons?: RegistryImage[]; assets?: string[]; @@ -69,6 +67,17 @@ export interface RegistryPackage { format_version: string; data_streams?: RegistryDataStream[]; policy_templates?: RegistryPolicyTemplate[]; +} + +// Uploaded package archives don't have extra fields +// Linter complaint disabled because this extra type is meant for better code readability +// eslint-disable-next-line @typescript-eslint/no-empty-interface +export interface ArchivePackage extends InstallablePackage {} + +// Registry packages do have extra fields. +// cf. 
type Package struct at https://github.com/elastic/package-registry/blob/master/util/package.go +export interface RegistryPackage extends InstallablePackage { + requirement: RequirementsByServiceName; download: string; path: string; } @@ -240,6 +249,7 @@ export interface Installation extends SavedObjectAttributes { install_status: EpmPackageInstallStatus; install_version: string; install_started_at: string; + install_source: InstallSource; } export type Installable = Installed | NotInstalled; diff --git a/x-pack/plugins/ingest_manager/server/errors/handlers.test.ts b/x-pack/plugins/ingest_manager/server/errors/handlers.test.ts index 361386a86d547..272d95c0b3688 100644 --- a/x-pack/plugins/ingest_manager/server/errors/handlers.test.ts +++ b/x-pack/plugins/ingest_manager/server/errors/handlers.test.ts @@ -13,6 +13,7 @@ import { IngestManagerError, RegistryError, PackageNotFoundError, + PackageUnsupportedMediaTypeError, defaultIngestErrorHandler, } from './index'; @@ -101,6 +102,25 @@ describe('defaultIngestErrorHandler', () => { expect(mockContract.logger?.error).toHaveBeenCalledWith(error.message); }); + it('415: PackageUnsupportedMediaType', async () => { + const error = new PackageUnsupportedMediaTypeError('123'); + const response = httpServerMock.createResponseFactory(); + + await defaultIngestErrorHandler({ error, response }); + + // response + expect(response.ok).toHaveBeenCalledTimes(0); + expect(response.customError).toHaveBeenCalledTimes(1); + expect(response.customError).toHaveBeenCalledWith({ + statusCode: 415, + body: { message: error.message }, + }); + + // logging + expect(mockContract.logger?.error).toHaveBeenCalledTimes(1); + expect(mockContract.logger?.error).toHaveBeenCalledWith(error.message); + }); + it('404: PackageNotFoundError', async () => { const error = new PackageNotFoundError('123'); const response = httpServerMock.createResponseFactory(); diff --git a/x-pack/plugins/ingest_manager/server/errors/handlers.ts b/x-pack/plugins/ingest_manager/server/errors/handlers.ts index b621f2dd29331..bcad3f9c022da 100644 --- a/x-pack/plugins/ingest_manager/server/errors/handlers.ts +++ b/x-pack/plugins/ingest_manager/server/errors/handlers.ts @@ -13,7 +13,12 @@ import { } from 'src/core/server'; import { errors as LegacyESErrors } from 'elasticsearch'; import { appContextService } from '../services'; -import { IngestManagerError, RegistryError, PackageNotFoundError } from './index'; +import { + IngestManagerError, + RegistryError, + PackageNotFoundError, + PackageUnsupportedMediaTypeError, +} from './index'; type IngestErrorHandler = ( params: IngestErrorHandlerParams @@ -52,6 +57,9 @@ const getHTTPResponseCode = (error: IngestManagerError): number => { if (error instanceof PackageNotFoundError) { return 404; // Not Found } + if (error instanceof PackageUnsupportedMediaTypeError) { + return 415; // Unsupported Media Type + } return 400; // Bad Request }; diff --git a/x-pack/plugins/ingest_manager/server/errors/index.ts b/x-pack/plugins/ingest_manager/server/errors/index.ts index f495bf551dcff..15ac97f21a17a 100644 --- a/x-pack/plugins/ingest_manager/server/errors/index.ts +++ b/x-pack/plugins/ingest_manager/server/errors/index.ts @@ -18,3 +18,7 @@ export class RegistryConnectionError extends RegistryError {} export class RegistryResponseError extends RegistryError {} export class PackageNotFoundError extends IngestManagerError {} export class PackageOutdatedError extends IngestManagerError {} +export class PackageUnsupportedMediaTypeError extends IngestManagerError {} 
+export class PackageInvalidArchiveError extends IngestManagerError {} +export class PackageCacheError extends IngestManagerError {} +export class PackageOperationNotSupportedError extends IngestManagerError {} diff --git a/x-pack/plugins/ingest_manager/server/routes/epm/handlers.ts b/x-pack/plugins/ingest_manager/server/routes/epm/handlers.ts index c55979d187f9d..0aa8641fd2a3e 100644 --- a/x-pack/plugins/ingest_manager/server/routes/epm/handlers.ts +++ b/x-pack/plugins/ingest_manager/server/routes/epm/handlers.ts @@ -8,7 +8,6 @@ import { RequestHandler, CustomHttpResponseOptions } from 'src/core/server'; import { GetInfoResponse, InstallPackageResponse, - MessageResponse, DeletePackageResponse, GetCategoriesResponse, GetPackagesResponse, @@ -35,8 +34,9 @@ import { getFile, getPackageInfo, handleInstallPackageFailure, - installPackage, isBulkInstallError, + installPackageFromRegistry, + installPackageByUpload, removeInstallation, getLimitedPackages, getInstallationObject, @@ -148,7 +148,7 @@ export const installPackageFromRegistryHandler: RequestHandler< const { pkgName, pkgVersion } = splitPkgKey(pkgkey); const installedPkg = await getInstallationObject({ savedObjectsClient, pkgName }); try { - const res = await installPackage({ + const res = await installPackageFromRegistry({ savedObjectsClient, pkgkey, callCluster, @@ -212,10 +212,24 @@ export const installPackageByUploadHandler: RequestHandler< undefined, TypeOf > = async (context, request, response) => { - const body: MessageResponse = { - response: 'package upload was received ok, but not installed (not implemented yet)', - }; - return response.ok({ body }); + const savedObjectsClient = context.core.savedObjects.client; + const callCluster = context.core.elasticsearch.legacy.client.callAsCurrentUser; + const contentType = request.headers['content-type'] as string; // from types it could also be string[] or undefined but this is checked later + const archiveBuffer = Buffer.from(request.body); + try { + const res = await installPackageByUpload({ + savedObjectsClient, + callCluster, + archiveBuffer, + contentType, + }); + const body: InstallPackageResponse = { + response: res, + }; + return response.ok({ body }); + } catch (error) { + return defaultIngestErrorHandler({ error, response }); + } }; export const deletePackageHandler: RequestHandler, + Installation +> = (installationDoc) => { + installationDoc.attributes.install_source = 'registry'; + + return installationDoc; +}; diff --git a/x-pack/plugins/ingest_manager/server/services/epm/archive/index.ts b/x-pack/plugins/ingest_manager/server/services/epm/archive/index.ts new file mode 100644 index 0000000000000..91ed489b3a5bb --- /dev/null +++ b/x-pack/plugins/ingest_manager/server/services/epm/archive/index.ts @@ -0,0 +1,332 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ + +import yaml from 'js-yaml'; +import { uniq } from 'lodash'; + +import { + ArchivePackage, + RegistryPolicyTemplate, + RegistryDataStream, + RegistryInput, + RegistryStream, + RegistryVarsEntry, +} from '../../../../common/types'; +import { PackageInvalidArchiveError, PackageUnsupportedMediaTypeError } from '../../../errors'; +import { pkgToPkgKey } from '../registry'; +import { cacheGet, cacheSet, setArchiveFilelist } from '../registry/cache'; +import { unzipBuffer, untarBuffer, ArchiveEntry } from '../registry/extract'; + +export async function loadArchivePackage({ + archiveBuffer, + contentType, +}: { + archiveBuffer: Buffer; + contentType: string; +}): Promise<{ paths: string[]; archivePackageInfo: ArchivePackage }> { + const paths = await unpackArchiveToCache(archiveBuffer, contentType); + const archivePackageInfo = parseAndVerifyArchive(paths); + setArchiveFilelist(archivePackageInfo.name, archivePackageInfo.version, paths); + + return { + paths, + archivePackageInfo, + }; +} + +function getBufferExtractorForContentType(contentType: string) { + if (contentType === 'application/gzip') { + return untarBuffer; + } else if (contentType === 'application/zip') { + return unzipBuffer; + } else { + throw new PackageUnsupportedMediaTypeError( + `Unsupported media type ${contentType}. Please use 'application/gzip' or 'application/zip'` + ); + } +} + +export async function unpackArchiveToCache( + archiveBuffer: Buffer, + contentType: string, + filter = (entry: ArchiveEntry): boolean => true +): Promise { + const bufferExtractor = getBufferExtractorForContentType(contentType); + const paths: string[] = []; + try { + await bufferExtractor(archiveBuffer, filter, (entry: ArchiveEntry) => { + const { path, buffer } = entry; + // skip directories + if (path.slice(-1) === '/') return; + if (buffer) { + cacheSet(path, buffer); + paths.push(path); + } + }); + } catch (error) { + throw new PackageInvalidArchiveError( + `Error during extraction of uploaded package: ${error}. Assumed content type was ${contentType}, check if this matches the archive type.` + ); + } + + // While unpacking a tar.gz file with unzipBuffer() will result in a thrown error in the try-catch above, + // unpacking a zip file with untarBuffer() just results in nothing. + if (paths.length === 0) { + throw new PackageInvalidArchiveError( + `Uploaded archive seems empty. Assumed content type was ${contentType}, check if this matches the archive type.` + ); + } + return paths; +} + +// TODO: everything below performs verification of manifest.yml files, and hence duplicates functionality already implemented in the +// package registry. At some point this should probably be replaced (or enhanced) with verification based on +// https://github.com/elastic/package-spec/ + +function parseAndVerifyArchive(paths: string[]): ArchivePackage { + // The top-level directory must match pkgName-pkgVersion, and no other top-level files or directories may be present + const toplevelDir = paths[0].split('/')[0]; + paths.forEach((path) => { + if (path.split('/')[0] !== toplevelDir) { + throw new PackageInvalidArchiveError('Package contains more than one top-level directory.'); + } + }); + + // The package must contain a manifest file ... + const manifestFile = `${toplevelDir}/manifest.yml`; + const manifestBuffer = cacheGet(manifestFile); + if (!paths.includes(manifestFile) || !manifestBuffer) { + throw new PackageInvalidArchiveError('Package must contain a top-level manifest.yml file.'); + } + + // ... 
which must be valid YAML + let manifest; + try { + manifest = yaml.load(manifestBuffer.toString()); + } catch (error) { + throw new PackageInvalidArchiveError(`Could not parse top-level package manifest: ${error}.`); + } + + // Package name and version from the manifest must match those from the toplevel directory + const pkgKey = pkgToPkgKey({ name: manifest.name, version: manifest.version }); + if (toplevelDir !== pkgKey) { + throw new PackageInvalidArchiveError( + `Name ${manifest.name} and version ${manifest.version} do not match top-level directory ${toplevelDir}` + ); + } + + const { name, version, description, type, categories, format_version: formatVersion } = manifest; + // check for mandatory fields + if (!(name && version && description && type && categories && formatVersion)) { + throw new PackageInvalidArchiveError( + 'Invalid top-level package manifest: one or more fields missing of name, version, description, type, categories, format_version' + ); + } + + const dataStreams = parseAndVerifyDataStreams(paths, name, version); + const policyTemplates = parseAndVerifyPolicyTemplates(manifest); + + return { + name, + version, + description, + type, + categories, + format_version: formatVersion, + data_streams: dataStreams, + policy_templates: policyTemplates, + }; +} + +function parseAndVerifyDataStreams( + paths: string[], + pkgName: string, + pkgVersion: string +): RegistryDataStream[] { + // A data stream is made up of a subdirectory of name-version/data_stream/, containing a manifest.yml + let dataStreamPaths: string[] = []; + const dataStreams: RegistryDataStream[] = []; + const pkgKey = pkgToPkgKey({ name: pkgName, version: pkgVersion }); + + // pick all paths matching name-version/data_stream/DATASTREAM_PATH/... + // from those, pick all unique data stream paths + paths + .filter((path) => path.startsWith(`${pkgKey}/data_stream/`)) + .forEach((path) => { + const parts = path.split('/'); + if (parts.length > 2 && parts[2]) dataStreamPaths.push(parts[2]); + }); + + dataStreamPaths = uniq(dataStreamPaths); + + dataStreamPaths.forEach((dataStreamPath) => { + const manifestFile = `${pkgKey}/data_stream/${dataStreamPath}/manifest.yml`; + const manifestBuffer = cacheGet(manifestFile); + if (!paths.includes(manifestFile) || !manifestBuffer) { + throw new PackageInvalidArchiveError( + `No manifest.yml file found for data stream '${dataStreamPath}'` + ); + } + + let manifest; + try { + manifest = yaml.load(manifestBuffer.toString()); + } catch (error) { + throw new PackageInvalidArchiveError( + `Could not parse package manifest for data stream '${dataStreamPath}': ${error}.` + ); + } + + const { + title: dataStreamTitle, + release, + ingest_pipeline: ingestPipeline, + type, + dataset, + } = manifest; + if (!(dataStreamTitle && release && type)) { + throw new PackageInvalidArchiveError( + `Invalid manifest for data stream '${dataStreamPath}': one or more fields missing of 'title', 'release', 'type'` + ); + } + const streams = parseAndVerifyStreams(manifest, dataStreamPath); + + // default ingest pipeline name see https://github.com/elastic/package-registry/blob/master/util/dataset.go#L26 + return dataStreams.push({ + dataset: dataset || `${pkgName}.${dataStreamPath}`, + title: dataStreamTitle, + release, + package: pkgName, + ingest_pipeline: ingestPipeline || 'default', + path: dataStreamPath, + type, + streams, + }); + }); + + return dataStreams; +} + +function parseAndVerifyStreams(manifest: any, dataStreamPath: string): RegistryStream[] { + const streams: RegistryStream[] 
= []; + const manifestStreams = manifest.streams; + if (manifestStreams && manifestStreams.length > 0) { + manifestStreams.forEach((manifestStream: any) => { + const { + input, + title: streamTitle, + description, + enabled, + vars: manifestVars, + template_path: templatePath, + } = manifestStream; + if (!(input && streamTitle)) { + throw new PackageInvalidArchiveError( + `Invalid manifest for data stream ${dataStreamPath}: stream is missing one or more fields of: input, title` + ); + } + const vars = parseAndVerifyVars(manifestVars, `data stream ${dataStreamPath}`); + // default template path name see https://github.com/elastic/package-registry/blob/master/util/dataset.go#L143 + streams.push({ + input, + title: streamTitle, + description, + enabled, + vars, + template_path: templatePath || 'stream.yml.hbs', + }); + }); + } + return streams; +} + +function parseAndVerifyVars(manifestVars: any[], location: string): RegistryVarsEntry[] { + const vars: RegistryVarsEntry[] = []; + if (manifestVars && manifestVars.length > 0) { + manifestVars.forEach((manifestVar) => { + const { + name, + title: varTitle, + description, + type, + required, + show_user: showUser, + multi, + def, + os, + } = manifestVar; + if (!(name && type)) { + throw new PackageInvalidArchiveError( + `Invalid var definition for ${location}: one of mandatory fields 'name' and 'type' missing in var: ${manifestVar}` + ); + } + vars.push({ + name, + title: varTitle, + description, + type, + required, + show_user: showUser, + multi, + default: def, + os, + }); + }); + } + return vars; +} + +function parseAndVerifyPolicyTemplates(manifest: any): RegistryPolicyTemplate[] { + const policyTemplates: RegistryPolicyTemplate[] = []; + const manifestPolicyTemplates = manifest.policy_templates; + if (manifestPolicyTemplates && manifestPolicyTemplates.length > 0) { + manifestPolicyTemplates.forEach((policyTemplate: any) => { + const { name, title: policyTemplateTitle, description, inputs, multiple } = policyTemplate; + if (!(name && policyTemplateTitle && description && inputs)) { + throw new PackageInvalidArchiveError( + `Invalid top-level manifest: one of mandatory fields 'name', 'title', 'description', 'input' missing in policy template: ${policyTemplate}` + ); + } + + const parsedInputs = parseAndVerifyInputs(inputs, `config template ${name}`); + + // defaults to true if undefined, but may be explicitly set to false. 
+ let parsedMultiple = true; + if (typeof multiple === 'boolean' && multiple === false) parsedMultiple = false; + + policyTemplates.push({ + name, + title: policyTemplateTitle, + description, + inputs: parsedInputs, + multiple: parsedMultiple, + }); + }); + } + return policyTemplates; +} + +function parseAndVerifyInputs(manifestInputs: any, location: string): RegistryInput[] { + const inputs: RegistryInput[] = []; + if (manifestInputs && manifestInputs.length > 0) { + manifestInputs.forEach((input: any) => { + const { type, title: inputTitle, description, vars } = input; + if (!(type && inputTitle)) { + throw new PackageInvalidArchiveError( + `Invalid top-level manifest: one of mandatory fields 'type', 'title' missing in input: ${input}` + ); + } + const parsedVars = parseAndVerifyVars(vars, location); + inputs.push({ + type, + title: inputTitle, + description, + vars: parsedVars, + }); + }); + } + return inputs; +} diff --git a/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/ingest_pipeline/install.ts b/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/ingest_pipeline/install.ts index 6088bcb71f878..43c0179c0aa8a 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/ingest_pipeline/install.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/ingest_pipeline/install.ts @@ -9,7 +9,7 @@ import { EsAssetReference, RegistryDataStream, ElasticsearchAssetType, - RegistryPackage, + InstallablePackage, } from '../../../../types'; import * as Registry from '../../registry'; import { CallESAsCurrentUser } from '../../../../types'; @@ -22,7 +22,7 @@ interface RewriteSubstitution { } export const installPipelines = async ( - registryPackage: RegistryPackage, + installablePackage: InstallablePackage, paths: string[], callCluster: CallESAsCurrentUser, savedObjectsClient: SavedObjectsClientContract @@ -30,7 +30,7 @@ export const installPipelines = async ( // unlike other ES assets, pipeline names are versioned so after a template is updated // it can be created pointing to the new template, without removing the old one and effecting data // so do not remove the currently installed pipelines here - const dataStreams = registryPackage.data_streams; + const dataStreams = installablePackage.data_streams; if (!dataStreams?.length) return []; const pipelinePaths = paths.filter((path) => isPipeline(path)); // get and save pipeline refs before installing pipelines @@ -43,14 +43,14 @@ export const installPipelines = async ( const nameForInstallation = getPipelineNameForInstallation({ pipelineName: name, dataStream, - packageVersion: registryPackage.version, + packageVersion: installablePackage.version, }); return { id: nameForInstallation, type: ElasticsearchAssetType.ingestPipeline }; }); acc.push(...pipelineObjectRefs); return acc; }, []); - await saveInstalledEsRefs(savedObjectsClient, registryPackage.name, pipelineRefs); + await saveInstalledEsRefs(savedObjectsClient, installablePackage.name, pipelineRefs); const pipelines = dataStreams.reduce>>((acc, dataStream) => { if (dataStream.ingest_pipeline) { acc.push( @@ -58,7 +58,7 @@ export const installPipelines = async ( dataStream, callCluster, paths: pipelinePaths, - pkgVersion: registryPackage.version, + pkgVersion: installablePackage.version, }) ); } diff --git a/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/template/install.ts b/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/template/install.ts index 8f80feb268910..d32d5b8093c52 100644 --- 
a/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/template/install.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/template/install.ts @@ -8,10 +8,10 @@ import Boom from 'boom'; import { SavedObjectsClientContract } from 'src/core/server'; import { RegistryDataStream, - RegistryPackage, ElasticsearchAssetType, TemplateRef, RegistryElasticsearch, + InstallablePackage, } from '../../../../types'; import { CallESAsCurrentUser } from '../../../../types'; import { Field, loadFieldsFromYaml, processFields } from '../../fields/field'; @@ -21,7 +21,7 @@ import * as Registry from '../../registry'; import { removeAssetsFromInstalledEsByType, saveInstalledEsRefs } from '../../packages/install'; export const installTemplates = async ( - registryPackage: RegistryPackage, + installablePackage: InstallablePackage, callCluster: CallESAsCurrentUser, paths: string[], savedObjectsClient: SavedObjectsClientContract @@ -35,11 +35,11 @@ export const installTemplates = async ( // remove package installation's references to index templates await removeAssetsFromInstalledEsByType( savedObjectsClient, - registryPackage.name, + installablePackage.name, ElasticsearchAssetType.indexTemplate ); // build templates per data stream from yml files - const dataStreams = registryPackage.data_streams; + const dataStreams = installablePackage.data_streams; if (!dataStreams) return []; // get template refs to save const installedTemplateRefs = dataStreams.map((dataStream) => ({ @@ -48,14 +48,14 @@ export const installTemplates = async ( })); // add package installation's references to index templates - await saveInstalledEsRefs(savedObjectsClient, registryPackage.name, installedTemplateRefs); + await saveInstalledEsRefs(savedObjectsClient, installablePackage.name, installedTemplateRefs); if (dataStreams) { const installTemplatePromises = dataStreams.reduce>>( (acc, dataStream) => { acc.push( installTemplateForDataStream({ - pkg: registryPackage, + pkg: installablePackage, callCluster, dataStream, }) @@ -171,7 +171,7 @@ export async function installTemplateForDataStream({ callCluster, dataStream, }: { - pkg: RegistryPackage; + pkg: InstallablePackage; callCluster: CallESAsCurrentUser; dataStream: RegistryDataStream; }): Promise { diff --git a/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/transform/install.ts b/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/transform/install.ts index d8aff10492595..89811783a7f79 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/transform/install.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/elasticsearch/transform/install.ts @@ -11,7 +11,7 @@ import * as Registry from '../../registry'; import { ElasticsearchAssetType, EsAssetReference, - RegistryPackage, + InstallablePackage, } from '../../../../../common/types/models'; import { CallESAsCurrentUser } from '../../../../types'; import { getInstallation } from '../../packages'; @@ -24,14 +24,14 @@ interface TransformInstallation { } export const installTransform = async ( - registryPackage: RegistryPackage, + installablePackage: InstallablePackage, paths: string[], callCluster: CallESAsCurrentUser, savedObjectsClient: SavedObjectsClientContract ) => { const installation = await getInstallation({ savedObjectsClient, - pkgName: registryPackage.name, + pkgName: installablePackage.name, }); let previousInstalledTransformEsAssets: EsAssetReference[] = []; if (installation) { @@ -46,13 +46,13 @@ export const installTransform = async ( 
previousInstalledTransformEsAssets.map((asset) => asset.id) ); - const installNameSuffix = `${registryPackage.version}`; + const installNameSuffix = `${installablePackage.version}`; const transformPaths = paths.filter((path) => isTransform(path)); let installedTransforms: EsAssetReference[] = []; if (transformPaths.length > 0) { const transformRefs = transformPaths.reduce((acc, path) => { acc.push({ - id: getTransformNameForInstallation(registryPackage, path, installNameSuffix), + id: getTransformNameForInstallation(installablePackage, path, installNameSuffix), type: ElasticsearchAssetType.transform, }); @@ -60,11 +60,15 @@ export const installTransform = async ( }, []); // get and save transform refs before installing transforms - await saveInstalledEsRefs(savedObjectsClient, registryPackage.name, transformRefs); + await saveInstalledEsRefs(savedObjectsClient, installablePackage.name, transformRefs); const transforms: TransformInstallation[] = transformPaths.map((path: string) => { return { - installationName: getTransformNameForInstallation(registryPackage, path, installNameSuffix), + installationName: getTransformNameForInstallation( + installablePackage, + path, + installNameSuffix + ), content: getAsset(path).toString('utf-8'), }; }); @@ -79,14 +83,14 @@ export const installTransform = async ( if (previousInstalledTransformEsAssets.length > 0) { const currentInstallation = await getInstallation({ savedObjectsClient, - pkgName: registryPackage.name, + pkgName: installablePackage.name, }); // remove the saved object reference await deleteTransformRefs( savedObjectsClient, currentInstallation?.installed_es || [], - registryPackage.name, + installablePackage.name, previousInstalledTransformEsAssets.map((asset) => asset.id), installedTransforms.map((installed) => installed.id) ); @@ -123,12 +127,12 @@ async function handleTransformInstall({ } const getTransformNameForInstallation = ( - registryPackage: RegistryPackage, + installablePackage: InstallablePackage, path: string, suffix: string ) => { const pathPaths = path.split('/'); const filename = pathPaths?.pop()?.split('.')[0]; const folderName = pathPaths?.pop(); - return `${registryPackage.name}.${folderName}-${filename}-${suffix}`; + return `${installablePackage.name}.${folderName}-${filename}-${suffix}`; }; diff --git a/x-pack/plugins/ingest_manager/server/services/epm/fields/field.ts b/x-pack/plugins/ingest_manager/server/services/epm/fields/field.ts index 5913302e77ba6..06091ab3cedb0 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/fields/field.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/fields/field.ts @@ -5,7 +5,7 @@ */ import { safeLoad } from 'js-yaml'; -import { RegistryPackage } from '../../../types'; +import { InstallablePackage } from '../../../types'; import { getAssetsData } from '../packages/assets'; // This should become a copy of https://github.com/elastic/beats/blob/d9a4c9c240a9820fab15002592e5bb6db318543b/libbeat/mapping/field.go#L39 @@ -253,7 +253,7 @@ const isFields = (path: string) => { */ export const loadFieldsFromYaml = async ( - pkg: RegistryPackage, + pkg: InstallablePackage, datasetName?: string ): Promise => { // Fetch all field definition files diff --git a/x-pack/plugins/ingest_manager/server/services/epm/kibana/index_pattern/install.ts b/x-pack/plugins/ingest_manager/server/services/epm/kibana/index_pattern/install.ts index bde542412f123..2aa28d23cf857 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/kibana/index_pattern/install.ts +++ 
b/x-pack/plugins/ingest_manager/server/services/epm/kibana/index_pattern/install.ts @@ -89,7 +89,9 @@ export async function installIndexPatterns( // TODO: move to install package // cache all installed packages if they don't exist const packagePromises = installedPackages.map((pkg) => - Registry.ensureCachedArchiveInfo(pkg.pkgName, pkg.pkgVersion) + // TODO: this hard-codes 'registry' as installSource, so uploaded packages are ignored + // and their fields will be removed from the generated index patterns after this runs. + Registry.ensureCachedArchiveInfo(pkg.pkgName, pkg.pkgVersion, 'registry') ); await Promise.all(packagePromises); diff --git a/x-pack/plugins/ingest_manager/server/services/epm/packages/assets.test.ts b/x-pack/plugins/ingest_manager/server/services/epm/packages/assets.test.ts index 78b42b03be831..eb43bef72db70 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/packages/assets.test.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/packages/assets.test.ts @@ -4,34 +4,41 @@ * you may not use this file except in compliance with the Elastic License. */ -import { RegistryPackage } from '../../../types'; +import { InstallablePackage } from '../../../types'; import { getAssets } from './assets'; +import { getArchiveFilelist } from '../registry/cache'; + +jest.mock('../registry/cache', () => { + return { + getArchiveFilelist: jest.fn(), + }; +}); + +const mockedGetArchiveFilelist = getArchiveFilelist as jest.Mock; +mockedGetArchiveFilelist.mockImplementation(() => [ + 'coredns-1.0.1/data_stream/log/elasticsearch/ingest-pipeline/pipeline-plaintext.json', + 'coredns-1.0.1/data_stream/log/elasticsearch/ingest-pipeline/pipeline-json.json', +]); const tests = [ { package: { - assets: [ - '/package/coredns/1.0.1/data_stream/log/elasticsearch/ingest-pipeline/pipeline-plaintext.json', - '/package/coredns/1.0.1/data_stream/log/elasticsearch/ingest-pipeline/pipeline-json.json', - ], - path: '/package/coredns/1.0.1', + name: 'coredns', + version: '1.0.1', }, dataset: 'log', filter: (path: string) => { return true; }, expected: [ - '/package/coredns/1.0.1/data_stream/log/elasticsearch/ingest-pipeline/pipeline-plaintext.json', - '/package/coredns/1.0.1/data_stream/log/elasticsearch/ingest-pipeline/pipeline-json.json', + 'coredns-1.0.1/data_stream/log/elasticsearch/ingest-pipeline/pipeline-plaintext.json', + 'coredns-1.0.1/data_stream/log/elasticsearch/ingest-pipeline/pipeline-json.json', ], }, { package: { - assets: [ - '/package/coredns-1.0.1/data_stream/log/elasticsearch/ingest-pipeline/pipeline-plaintext.json', - '/package/coredns-1.0.1/data_stream/log/elasticsearch/ingest-pipeline/pipeline-json.json', - ], - path: '/package/coredns/1.0.1', + name: 'coredns', + version: '1.0.1', }, // Non existant dataset dataset: 'foo', @@ -42,10 +49,8 @@ const tests = [ }, { package: { - assets: [ - '/package/coredns-1.0.1/data_stream/log/elasticsearch/ingest-pipeline/pipeline-plaintext.json', - '/package/coredns-1.0.1/data_stream/log/elasticsearch/ingest-pipeline/pipeline-json.json', - ], + name: 'coredns', + version: '1.0.1', }, // Filter which does not exist filter: (path: string) => { @@ -57,8 +62,8 @@ const tests = [ test('testGetAssets', () => { for (const value of tests) { - // as needed to pretent it is a RegistryPackage - const assets = getAssets(value.package as RegistryPackage, value.filter, value.dataset); + // as needed to pretend it is an InstallablePackage + const assets = getAssets(value.package as InstallablePackage, value.filter, value.dataset); 
expect(assets).toStrictEqual(value.expected); } }); diff --git a/x-pack/plugins/ingest_manager/server/services/epm/packages/assets.ts b/x-pack/plugins/ingest_manager/server/services/epm/packages/assets.ts index a8abc12917781..856f04c0c9b67 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/packages/assets.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/packages/assets.ts @@ -4,9 +4,9 @@ * you may not use this file except in compliance with the Elastic License. */ -import { RegistryPackage } from '../../../types'; +import { InstallablePackage } from '../../../types'; import * as Registry from '../registry'; -import { ensureCachedArchiveInfo } from '../registry'; +import { getArchiveFilelist } from '../registry/cache'; // paths from RegistryPackage are routes to the assets on EPR // e.g. `/package/nginx/1.2.0/data_stream/access/fields/fields.yml` @@ -14,30 +14,26 @@ import { ensureCachedArchiveInfo } from '../registry'; // e.g. `nginx-1.2.0/data_stream/access/fields/fields.yml` // RegistryPackage paths have a `/package/` prefix compared to ArchiveEntry paths // and different package and version structure -const EPR_PATH_PREFIX = '/package'; -function registryPathToArchivePath(registryPath: RegistryPackage['path']): string { - const path = registryPath.replace(`${EPR_PATH_PREFIX}/`, ''); - const [pkgName, pkgVersion] = path.split('/'); - return path.replace(`${pkgName}/${pkgVersion}`, `${pkgName}-${pkgVersion}`); -} export function getAssets( - packageInfo: RegistryPackage, + packageInfo: InstallablePackage, filter = (path: string): boolean => true, datasetName?: string ): string[] { const assets: string[] = []; - if (!packageInfo?.assets) return assets; + const paths = getArchiveFilelist(packageInfo.name, packageInfo.version); + // TODO: might be better to throw a PackageCacheError here + if (!paths || paths.length === 0) return assets; // Skip directories - for (const path of packageInfo.assets) { + for (const path of paths) { if (path.endsWith('/')) { continue; } // if dataset, filter for them if (datasetName) { - const comparePath = `${packageInfo.path}/data_stream/${datasetName}/`; + const comparePath = `${packageInfo.name}-${packageInfo.version}/data_stream/${datasetName}/`; if (!path.includes(comparePath)) { continue; } @@ -52,21 +48,20 @@ export function getAssets( } export async function getAssetsData( - packageInfo: RegistryPackage, + packageInfo: InstallablePackage, filter = (path: string): boolean => true, datasetName?: string ): Promise { // TODO: Needs to be called to fill the cache but should not be required - await ensureCachedArchiveInfo(packageInfo.name, packageInfo.version); + await Registry.ensureCachedArchiveInfo(packageInfo.name, packageInfo.version, 'registry'); // Gather all asset data const assets = getAssets(packageInfo, filter, datasetName); - const entries: Registry.ArchiveEntry[] = assets.map((registryPath) => { - const archivePath = registryPathToArchivePath(registryPath); - const buffer = Registry.getAsset(archivePath); + const entries: Registry.ArchiveEntry[] = assets.map((path) => { + const buffer = Registry.getAsset(path); - return { path: registryPath, buffer }; + return { path, buffer }; }); return entries; diff --git a/x-pack/plugins/ingest_manager/server/services/epm/packages/ensure_installed_default_packages.test.ts b/x-pack/plugins/ingest_manager/server/services/epm/packages/ensure_installed_default_packages.test.ts index f0b487ad59774..aaff5df39bac3 100644 --- 
a/x-pack/plugins/ingest_manager/server/services/epm/packages/ensure_installed_default_packages.test.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/packages/ensure_installed_default_packages.test.ts @@ -49,6 +49,7 @@ const mockInstallation: SavedObject = { install_status: 'installed', install_version: '1.0.0', install_started_at: new Date().toISOString(), + install_source: 'registry', }, }; diff --git a/x-pack/plugins/ingest_manager/server/services/epm/packages/get.ts b/x-pack/plugins/ingest_manager/server/services/epm/packages/get.ts index 2d11b6157804f..74ee25eace736 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/packages/get.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/packages/get.ts @@ -101,11 +101,14 @@ export async function getPackageInfo(options: { pkgVersion: string; }): Promise { const { savedObjectsClient, pkgName, pkgVersion } = options; - const [item, savedObject, latestPackage, assets] = await Promise.all([ - Registry.fetchInfo(pkgName, pkgVersion), + const [ + savedObject, + latestPackage, + { paths: assets, registryPackageInfo: item }, + ] = await Promise.all([ getInstallationObject({ savedObjectsClient, pkgName }), Registry.fetchFindLatestPackage(pkgName), - Registry.getArchiveInfo(pkgName, pkgVersion), + Registry.loadRegistryPackage(pkgName, pkgVersion), ]); // add properties that aren't (or aren't yet) on Registry response diff --git a/x-pack/plugins/ingest_manager/server/services/epm/packages/get_install_type.test.ts b/x-pack/plugins/ingest_manager/server/services/epm/packages/get_install_type.test.ts index cce4b7fee8fd7..a04bfaafe7570 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/packages/get_install_type.test.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/packages/get_install_type.test.ts @@ -21,6 +21,7 @@ const mockInstallation: SavedObject = { install_status: 'installed', install_version: '1.0.0', install_started_at: new Date().toISOString(), + install_source: 'registry', }, }; const mockInstallationUpdateFail: SavedObject = { @@ -37,6 +38,7 @@ const mockInstallationUpdateFail: SavedObject = { install_status: 'installing', install_version: '1.0.1', install_started_at: new Date().toISOString(), + install_source: 'registry', }, }; diff --git a/x-pack/plugins/ingest_manager/server/services/epm/packages/index.ts b/x-pack/plugins/ingest_manager/server/services/epm/packages/index.ts index 94aa969c2d2b8..92070f3c2fafc 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/packages/index.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/packages/index.ts @@ -27,9 +27,10 @@ export { export { BulkInstallResponse, - handleInstallPackageFailure, - installPackage, IBulkInstallPackageError, + handleInstallPackageFailure, + installPackageFromRegistry, + installPackageByUpload, ensureInstalledPackage, } from './install'; export { removeInstallation } from './remove'; diff --git a/x-pack/plugins/ingest_manager/server/services/epm/packages/install.ts b/x-pack/plugins/ingest_manager/server/services/epm/packages/install.ts index d7262ebb66b2e..a7514d1075d78 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/packages/install.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/packages/install.ts @@ -8,7 +8,7 @@ import { SavedObject, SavedObjectsClientContract } from 'src/core/server'; import semver from 'semver'; import Boom from 'boom'; import { UnwrapPromise } from '@kbn/utility-types'; -import { BulkInstallPackageInfo } from '../../../../common'; +import { BulkInstallPackageInfo, 
InstallablePackage, InstallSource } from '../../../../common'; import { PACKAGES_SAVED_OBJECT_TYPE, MAX_TIME_COMPLETE_INSTALL } from '../../../constants'; import { AssetReference, @@ -42,10 +42,15 @@ import { } from '../kibana/assets/install'; import { updateCurrentWriteIndices } from '../elasticsearch/template/template'; import { deleteKibanaSavedObjectsAssets, removeInstallation } from './remove'; -import { IngestManagerError, PackageOutdatedError } from '../../../errors'; +import { + IngestManagerError, + PackageOperationNotSupportedError, + PackageOutdatedError, +} from '../../../errors'; import { getPackageSavedObjects } from './get'; import { installTransform } from '../elasticsearch/transform/install'; import { appContextService } from '../../app_context'; +import { loadArchivePackage } from '../archive'; export async function installLatestPackage(options: { savedObjectsClient: SavedObjectsClientContract; @@ -59,7 +64,7 @@ export async function installLatestPackage(options: { name: latestPackage.name, version: latestPackage.version, }); - return installPackage({ savedObjectsClient, pkgkey, callCluster }); + return installPackageFromRegistry({ savedObjectsClient, pkgkey, callCluster }); } catch (err) { throw err; } @@ -155,7 +160,7 @@ export async function handleInstallPackageFailure({ } const prevVersion = `${pkgName}-${installedPkg.attributes.version}`; logger.error(`rolling back to ${prevVersion} after error installing ${pkgkey}`); - await installPackage({ + await installPackageFromRegistry({ savedObjectsClient, pkgkey: prevVersion, callCluster, @@ -193,7 +198,7 @@ export async function upgradePackage({ }); try { - const assets = await installPackage({ savedObjectsClient, pkgkey, callCluster }); + const assets = await installPackageFromRegistry({ savedObjectsClient, pkgkey, callCluster }); return { name: pkgToUpgrade, newVersion: latestPkg.version, @@ -232,7 +237,7 @@ interface InstallPackageParams { force?: boolean; } -export async function installPackage({ +export async function installPackageFromRegistry({ savedObjectsClient, pkgkey, callCluster, @@ -254,12 +259,96 @@ export async function installPackage({ if (semver.lt(pkgVersion, latestPackage.version) && !force && !installOutOfDateVersionOk) { throw new PackageOutdatedError(`${pkgkey} is out-of-date and cannot be installed or updated`); } - const paths = await Registry.getArchiveInfo(pkgName, pkgVersion); - const registryPackageInfo = await Registry.fetchInfo(pkgName, pkgVersion); + + const { paths, registryPackageInfo } = await Registry.loadRegistryPackage(pkgName, pkgVersion); const removable = !isRequiredPackage(pkgName); const { internal = false } = registryPackageInfo; - const toSaveESIndexPatterns = generateESIndexPatterns(registryPackageInfo.data_streams); + const installSource = 'registry'; + + return installPackage({ + savedObjectsClient, + callCluster, + pkgName, + pkgVersion, + installedPkg, + paths, + removable, + internal, + packageInfo: registryPackageInfo, + installType, + installSource, + }); +} + +export async function installPackageByUpload({ + savedObjectsClient, + callCluster, + archiveBuffer, + contentType, +}: { + savedObjectsClient: SavedObjectsClientContract; + callCluster: CallESAsCurrentUser; + archiveBuffer: Buffer; + contentType: string; +}): Promise { + const { paths, archivePackageInfo } = await loadArchivePackage({ archiveBuffer, contentType }); + const installedPkg = await getInstallationObject({ + savedObjectsClient, + pkgName: archivePackageInfo.name, + }); + const installType = 
getInstallType({ pkgVersion: archivePackageInfo.version, installedPkg }); + if (installType !== 'install') { + throw new PackageOperationNotSupportedError( + `Package upload only supports fresh installations. Package ${archivePackageInfo.name} is already installed, please uninstall first.` + ); + } + + const removable = !isRequiredPackage(archivePackageInfo.name); + const { internal = false } = archivePackageInfo; + const installSource = 'upload'; + + return installPackage({ + savedObjectsClient, + callCluster, + pkgName: archivePackageInfo.name, + pkgVersion: archivePackageInfo.version, + installedPkg, + paths, + removable, + internal, + packageInfo: archivePackageInfo, + installType, + installSource, + }); +} + +async function installPackage({ + savedObjectsClient, + callCluster, + pkgName, + pkgVersion, + installedPkg, + paths, + removable, + internal, + packageInfo, + installType, + installSource, +}: { + savedObjectsClient: SavedObjectsClientContract; + callCluster: CallESAsCurrentUser; + pkgName: string; + pkgVersion: string; + installedPkg?: SavedObject; + paths: string[]; + removable: boolean; + internal: boolean; + packageInfo: InstallablePackage; + installType: InstallType; + installSource: InstallSource; +}): Promise { + const toSaveESIndexPatterns = generateESIndexPatterns(packageInfo.data_streams); // add the package installation to the saved object. // if some installation already exists, just update install info @@ -273,12 +362,14 @@ export async function installPackage({ installed_kibana: [], installed_es: [], toSaveESIndexPatterns, + installSource, }); } else { await savedObjectsClient.update(PACKAGES_SAVED_OBJECT_TYPE, pkgName, { install_version: pkgVersion, install_status: 'installing', install_started_at: new Date().toISOString(), + install_source: installSource, }); } const installIndexPatternPromise = installIndexPatterns(savedObjectsClient, pkgName, pkgVersion); @@ -309,14 +400,14 @@ export async function installPackage({ // installs versionized pipelines without removing currently installed ones const installedPipelines = await installPipelines( - registryPackageInfo, + packageInfo, paths, callCluster, savedObjectsClient ); // install or update the templates referencing the newly installed pipelines const installedTemplates = await installTemplates( - registryPackageInfo, + packageInfo, callCluster, paths, savedObjectsClient @@ -326,7 +417,7 @@ export async function installPackage({ await updateCurrentWriteIndices(callCluster, installedTemplates); const installedTransforms = await installTransform( - registryPackageInfo, + packageInfo, paths, callCluster, savedObjectsClient @@ -388,6 +479,7 @@ export async function createInstallation(options: { installed_kibana: KibanaAssetReference[]; installed_es: EsAssetReference[]; toSaveESIndexPatterns: Record; + installSource: InstallSource; }) { const { savedObjectsClient, @@ -398,6 +490,7 @@ export async function createInstallation(options: { installed_kibana: installedKibana, installed_es: installedEs, toSaveESIndexPatterns, + installSource, } = options; await savedObjectsClient.create( PACKAGES_SAVED_OBJECT_TYPE, @@ -412,6 +505,7 @@ export async function createInstallation(options: { install_version: pkgVersion, install_status: 'installing', install_started_at: new Date().toISOString(), + install_source: installSource, }, { id: pkgName, overwrite: true } ); @@ -477,7 +571,7 @@ export async function ensurePackagesCompletedInstall( const pkgkey = `${pkg.attributes.name}-${pkg.attributes.install_version}`; // reinstall 
package if (elapsedTime > MAX_TIME_COMPLETE_INSTALL) { - acc.push(installPackage({ savedObjectsClient, pkgkey, callCluster })); + acc.push(installPackageFromRegistry({ savedObjectsClient, pkgkey, callCluster })); } return acc; }, []); diff --git a/x-pack/plugins/ingest_manager/server/services/epm/packages/remove.ts b/x-pack/plugins/ingest_manager/server/services/epm/packages/remove.ts index 2434ebf27aa5d..417f2871a6cbf 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/packages/remove.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/packages/remove.ts @@ -18,7 +18,7 @@ import { deletePipeline } from '../elasticsearch/ingest_pipeline/'; import { installIndexPatterns } from '../kibana/index_pattern/install'; import { deleteTransforms } from '../elasticsearch/transform/remove'; import { packagePolicyService, appContextService } from '../..'; -import { splitPkgKey, deletePackageCache, getArchiveInfo } from '../registry'; +import { splitPkgKey, deletePackageCache } from '../registry'; export async function removeInstallation(options: { savedObjectsClient: SavedObjectsClientContract; @@ -57,8 +57,7 @@ export async function removeInstallation(options: { // remove the package archive and its contents from the cache so that a reinstall fetches // a fresh copy from the registry - const paths = await getArchiveInfo(pkgName, pkgVersion); - deletePackageCache(pkgName, pkgVersion, paths); + deletePackageCache(pkgName, pkgVersion); // successful delete's in SO client return {}. return something more useful return installedAssets; diff --git a/x-pack/plugins/ingest_manager/server/services/epm/registry/cache.ts b/x-pack/plugins/ingest_manager/server/services/epm/registry/cache.ts index b7c1e8c2069d6..695db9db73fa2 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/registry/cache.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/registry/cache.ts @@ -12,12 +12,12 @@ export const cacheHas = (key: string) => cache.has(key); export const cacheClear = () => cache.clear(); export const cacheDelete = (key: string) => cache.delete(key); -const archiveLocationCache: Map<string, string> = new Map(); -export const getArchiveLocation = (name: string, version: string) => - archiveLocationCache.get(pkgToPkgKey({ name, version })); +const archiveFilelistCache: Map<string, string[]> = new Map(); +export const getArchiveFilelist = (name: string, version: string) => + archiveFilelistCache.get(pkgToPkgKey({ name, version })); -export const setArchiveLocation = (name: string, version: string, location: string) => - archiveLocationCache.set(pkgToPkgKey({ name, version }), location); +export const setArchiveFilelist = (name: string, version: string, paths: string[]) => + archiveFilelistCache.set(pkgToPkgKey({ name, version }), paths); -export const deleteArchiveLocation = (name: string, version: string) => - archiveLocationCache.delete(pkgToPkgKey({ name, version })); +export const deleteArchiveFilelist = (name: string, version: string) => + archiveFilelistCache.delete(pkgToPkgKey({ name, version })); diff --git a/x-pack/plugins/ingest_manager/server/services/epm/registry/index.test.ts b/x-pack/plugins/ingest_manager/server/services/epm/registry/index.test.ts index 2fd9175549026..ba51636c13f36 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/registry/index.test.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/registry/index.test.ts @@ -6,17 +6,8 @@ import { AssetParts } from '../../../types'; import { getBufferExtractor, pathParts, splitPkgKey } from './index'; -import { getArchiveLocation } 
from './cache'; import { untarBuffer, unzipBuffer } from './extract'; -jest.mock('./cache', () => { - return { - getArchiveLocation: jest.fn(), - }; -}); - -const mockedGetArchiveLocation = getArchiveLocation as jest.Mock; - const testPaths = [ { path: 'foo-1.1.0/service/type/file.yml', @@ -92,19 +83,13 @@ describe('splitPkgKey tests', () => { }); describe('getBufferExtractor', () => { - it('throws if the archive has not been downloaded/cached yet', () => { - expect(() => getBufferExtractor('missing', '1.2.3')).toThrow('no archive location'); - }); - it('returns unzipBuffer if the archive key ends in .zip', () => { - mockedGetArchiveLocation.mockImplementation(() => '.zip'); - const extractor = getBufferExtractor('will-use-mocked-key', 'a.b.c'); + const extractor = getBufferExtractor('.zip'); expect(extractor).toBe(unzipBuffer); }); it('returns untarBuffer if the key ends in anything else', () => { - mockedGetArchiveLocation.mockImplementation(() => 'xyz'); - const extractor = getBufferExtractor('will-use-mocked-key', 'a.b.c'); + const extractor = getBufferExtractor('.xyz'); expect(extractor).toBe(untarBuffer); }); }); diff --git a/x-pack/plugins/ingest_manager/server/services/epm/registry/index.ts b/x-pack/plugins/ingest_manager/server/services/epm/registry/index.ts index 22f1b670b2cc4..66f28fe58599a 100644 --- a/x-pack/plugins/ingest_manager/server/services/epm/registry/index.ts +++ b/x-pack/plugins/ingest_manager/server/services/epm/registry/index.ts @@ -12,6 +12,7 @@ import { AssetsGroupedByServiceByType, CategoryId, CategorySummaryList, + InstallSource, KibanaAssetType, RegistryPackage, RegistrySearchResults, @@ -21,17 +22,16 @@ import { cacheGet, cacheSet, cacheDelete, - cacheHas, - getArchiveLocation, - setArchiveLocation, - deleteArchiveLocation, + getArchiveFilelist, + setArchiveFilelist, + deleteArchiveFilelist, } from './cache'; import { ArchiveEntry, untarBuffer, unzipBuffer } from './extract'; import { fetchUrl, getResponse, getResponseStream } from './requests'; import { streamToBuffer } from './streams'; import { getRegistryUrl } from './registry_url'; import { appContextService } from '../..'; -import { PackageNotFoundError } from '../../../errors'; +import { PackageNotFoundError, PackageCacheError } from '../../../errors'; export { ArchiveEntry } from './extract'; @@ -132,14 +132,14 @@ export async function fetchCategories(params?: CategoriesParams): Promise true ): Promise { const paths: string[] = []; - const archiveBuffer = await getOrFetchArchiveBuffer(pkgName, pkgVersion); - const bufferExtractor = getBufferExtractor(pkgName, pkgVersion); + const { archiveBuffer, archivePath } = await fetchArchiveBuffer(pkgName, pkgVersion); + const bufferExtractor = getBufferExtractor(archivePath); await bufferExtractor(archiveBuffer, filter, (entry: ArchiveEntry) => { const { path, buffer } = entry; const { file } = pathParts(path); @@ -153,6 +153,22 @@ export async function getArchiveInfo( return paths; } +export async function loadRegistryPackage( + pkgName: string, + pkgVersion: string +): Promise<{ paths: string[]; registryPackageInfo: RegistryPackage }> { + let paths = getArchiveFilelist(pkgName, pkgVersion); + if (!paths || paths.length === 0) { + paths = await unpackRegistryPackageToCache(pkgName, pkgVersion); + setArchiveFilelist(pkgName, pkgVersion, paths); + } + + // TODO: cache this as well? 
+ const registryPackageInfo = await fetchInfo(pkgName, pkgVersion); + + return { paths, registryPackageInfo }; +} + export function pathParts(path: string): AssetParts { let dataset; @@ -183,45 +199,39 @@ export function pathParts(path: string): AssetParts { } as AssetParts; } -export function getBufferExtractor(pkgName: string, pkgVersion: string) { - const archiveLocation = getArchiveLocation(pkgName, pkgVersion); - if (!archiveLocation) throw new Error(`no archive location for ${pkgName} ${pkgVersion}`); - const isZip = archiveLocation.endsWith('.zip'); +export function getBufferExtractor(archivePath: string) { + const isZip = archivePath.endsWith('.zip'); const bufferExtractor = isZip ? unzipBuffer : untarBuffer; return bufferExtractor; } -async function getOrFetchArchiveBuffer(pkgName: string, pkgVersion: string): Promise { - const key = getArchiveLocation(pkgName, pkgVersion); - let buffer = key && cacheGet(key); - if (!buffer) { - buffer = await fetchArchiveBuffer(pkgName, pkgVersion); - } - - if (buffer) { - return buffer; - } else { - throw new Error(`no archive buffer for ${key}`); - } -} - -export async function ensureCachedArchiveInfo(name: string, version: string) { - const pkgkey = getArchiveLocation(name, version); - if (!pkgkey || !cacheHas(pkgkey)) { - await getArchiveInfo(name, version); +export async function ensureCachedArchiveInfo( + name: string, + version: string, + installSource: InstallSource = 'registry' +) { + const paths = getArchiveFilelist(name, version); + if (!paths || paths.length === 0) { + if (installSource === 'registry') { + await loadRegistryPackage(name, version); + } else { + throw new PackageCacheError( + `Package ${name}-${version} not cached. If it was uploaded, try uninstalling and reinstalling manually.` + ); + } } } -async function fetchArchiveBuffer(pkgName: string, pkgVersion: string): Promise { +async function fetchArchiveBuffer( + pkgName: string, + pkgVersion: string +): Promise<{ archiveBuffer: Buffer; archivePath: string }> { const { download: archivePath } = await fetchInfo(pkgName, pkgVersion); const archiveUrl = `${getRegistryUrl()}${archivePath}`; - const buffer = await getResponseStream(archiveUrl).then(streamToBuffer); + const archiveBuffer = await getResponseStream(archiveUrl).then(streamToBuffer); - setArchiveLocation(pkgName, pkgVersion, archivePath); - cacheSet(archivePath, buffer); - - return buffer; + return { archiveBuffer, archivePath }; } export function getAsset(key: string) { @@ -250,16 +260,14 @@ export function groupPathsByService(paths: string[]): AssetsGroupedByServiceByTy }; } -export const deletePackageCache = (name: string, version: string, paths: string[]) => { - const archiveLocation = getArchiveLocation(name, version); - if (archiveLocation) { - // delete cached archive - cacheDelete(archiveLocation); +export const deletePackageCache = (name: string, version: string) => { + // get cached archive filelist + const paths = getArchiveFilelist(name, version); - // delete cached archive location - deleteArchiveLocation(name, version); - } - // delete cached archive contents - // this has been populated in Registry.getArchiveInfo() - paths.forEach((path) => cacheDelete(path)); + // delete cached archive filelist + deleteArchiveFilelist(name, version); + + // delete cached archive files + // this has been populated in unpackRegistryPackageToCache() + paths?.forEach((path) => cacheDelete(path)); }; diff --git a/x-pack/plugins/ingest_manager/server/types/index.tsx b/x-pack/plugins/ingest_manager/server/types/index.tsx 
index fc5ba1af196ad..0c070959e3b93 100644 --- a/x-pack/plugins/ingest_manager/server/types/index.tsx +++ b/x-pack/plugins/ingest_manager/server/types/index.tsx @@ -52,6 +52,7 @@ export { KibanaAssetReference, ElasticsearchAssetType, RegistryPackage, + InstallablePackage, AssetType, Installable, KibanaAssetType, @@ -68,6 +69,7 @@ export { Settings, SettingsSOAttributes, InstallType, + InstallSource, // Agent Request types PostAgentEnrollRequest, PostAgentCheckinRequest, diff --git a/x-pack/plugins/security_solution/common/endpoint/generate_data.ts b/x-pack/plugins/security_solution/common/endpoint/generate_data.ts index ec7a49da469fe..f0254616e6c9d 100644 --- a/x-pack/plugins/security_solution/common/endpoint/generate_data.ts +++ b/x-pack/plugins/security_solution/common/endpoint/generate_data.ts @@ -1215,6 +1215,7 @@ export class EndpointDocGenerator { install_version: '0.5.0', install_status: 'installed', install_started_at: '2020-06-24T14:41:23.098Z', + install_source: 'registry', }, references: [], updated_at: '2020-06-24T14:41:23.098Z', diff --git a/x-pack/test/ingest_manager_api_integration/apis/epm/install_by_upload.ts b/x-pack/test/ingest_manager_api_integration/apis/epm/install_by_upload.ts index e6d2affaec0cd..7fa0e0f38179a 100644 --- a/x-pack/test/ingest_manager_api_integration/apis/epm/install_by_upload.ts +++ b/x-pack/test/ingest_manager_api_integration/apis/epm/install_by_upload.ts @@ -9,18 +9,43 @@ import path from 'path'; import expect from '@kbn/expect'; import { FtrProviderContext } from '../../../api_integration/ftr_provider_context'; -import { warnAndSkipTest } from '../../helpers'; +import { skipIfNoDockerRegistry } from '../../helpers'; -export default function ({ getService }: FtrProviderContext) { +export default function (providerContext: FtrProviderContext) { + const { getService } = providerContext; const supertest = getService('supertest'); const dockerServers = getService('dockerServers'); - const log = getService('log'); const testPkgArchiveTgz = path.join( path.dirname(__filename), '../fixtures/direct_upload_packages/apache_0.1.4.tar.gz' ); - const testPkgKey = 'apache-0.14'; + const testPkgArchiveZip = path.join( + path.dirname(__filename), + '../fixtures/direct_upload_packages/apache_0.1.4.zip' + ); + const testPkgArchiveInvalidTwoToplevels = path.join( + path.dirname(__filename), + '../fixtures/direct_upload_packages/apache_invalid_two_toplevels_0.1.4.zip' + ); + const testPkgArchiveInvalidNoManifest = path.join( + path.dirname(__filename), + '../fixtures/direct_upload_packages/apache_invalid_no_manifest_0.1.4.zip' + ); + const testPkgArchiveInvalidManifestInvalidYaml = path.join( + path.dirname(__filename), + '../fixtures/direct_upload_packages/apache_invalid_manifest_invalid_yaml_0.1.4.zip' + ); + const testPkgArchiveInvalidManifestMissingField = path.join( + path.dirname(__filename), + '../fixtures/direct_upload_packages/apache_invalid_manifest_missing_field_0.1.4.zip' + ); + const testPkgArchiveInvalidToplevelMismatch = path.join( + path.dirname(__filename), + '../fixtures/direct_upload_packages/apache_invalid_toplevel_mismatch_0.1.4.zip' + ); + + const testPkgKey = 'apache-0.1.4'; const server = dockerServers.get('registry'); const deletePackage = async (pkgkey: string) => { @@ -28,28 +53,125 @@ export default function ({ getService }: FtrProviderContext) { }; describe('installs packages from direct upload', async () => { - after(async () => { - if (server.enabled) { + skipIfNoDockerRegistry(providerContext); + afterEach(async () => { + if (server) { 
diff --git a/x-pack/test/ingest_manager_api_integration/apis/epm/install_by_upload.ts b/x-pack/test/ingest_manager_api_integration/apis/epm/install_by_upload.ts
index e6d2affaec0cd..7fa0e0f38179a 100644
--- a/x-pack/test/ingest_manager_api_integration/apis/epm/install_by_upload.ts
+++ b/x-pack/test/ingest_manager_api_integration/apis/epm/install_by_upload.ts
@@ -9,18 +9,43 @@ import path from 'path';
 
 import expect from '@kbn/expect';
 
 import { FtrProviderContext } from '../../../api_integration/ftr_provider_context';
-import { warnAndSkipTest } from '../../helpers';
+import { skipIfNoDockerRegistry } from '../../helpers';
 
-export default function ({ getService }: FtrProviderContext) {
+export default function (providerContext: FtrProviderContext) {
+  const { getService } = providerContext;
   const supertest = getService('supertest');
   const dockerServers = getService('dockerServers');
-  const log = getService('log');
   const testPkgArchiveTgz = path.join(
     path.dirname(__filename),
     '../fixtures/direct_upload_packages/apache_0.1.4.tar.gz'
   );
-  const testPkgKey = 'apache-0.14';
+  const testPkgArchiveZip = path.join(
+    path.dirname(__filename),
+    '../fixtures/direct_upload_packages/apache_0.1.4.zip'
+  );
+  const testPkgArchiveInvalidTwoToplevels = path.join(
+    path.dirname(__filename),
+    '../fixtures/direct_upload_packages/apache_invalid_two_toplevels_0.1.4.zip'
+  );
+  const testPkgArchiveInvalidNoManifest = path.join(
+    path.dirname(__filename),
+    '../fixtures/direct_upload_packages/apache_invalid_no_manifest_0.1.4.zip'
+  );
+  const testPkgArchiveInvalidManifestInvalidYaml = path.join(
+    path.dirname(__filename),
+    '../fixtures/direct_upload_packages/apache_invalid_manifest_invalid_yaml_0.1.4.zip'
+  );
+  const testPkgArchiveInvalidManifestMissingField = path.join(
+    path.dirname(__filename),
+    '../fixtures/direct_upload_packages/apache_invalid_manifest_missing_field_0.1.4.zip'
+  );
+  const testPkgArchiveInvalidToplevelMismatch = path.join(
+    path.dirname(__filename),
+    '../fixtures/direct_upload_packages/apache_invalid_toplevel_mismatch_0.1.4.zip'
+  );
+
+  const testPkgKey = 'apache-0.1.4';
   const server = dockerServers.get('registry');
 
   const deletePackage = async (pkgkey: string) => {
@@ -28,28 +53,125 @@ export default function ({ getService }: FtrProviderContext) {
   };
 
   describe('installs packages from direct upload', async () => {
-    after(async () => {
-      if (server.enabled) {
+    skipIfNoDockerRegistry(providerContext);
+    afterEach(async () => {
+      if (server) {
         // remove the package just in case it being installed will affect other tests
         await deletePackage(testPkgKey);
       }
     });
 
     it('should install a tar archive correctly', async function () {
-      if (server.enabled) {
-        const buf = fs.readFileSync(testPkgArchiveTgz);
-        const res = await supertest
-          .post(`/api/ingest_manager/epm/packages`)
-          .set('kbn-xsrf', 'xxxx')
-          .type('application/gzip')
-          .send(buf)
-          .expect(200);
-        expect(res.body.response).to.equal(
-          'package upload was received ok, but not installed (not implemented yet)'
-        );
-      } else {
-        warnAndSkipTest(this, log);
-      }
+      const buf = fs.readFileSync(testPkgArchiveTgz);
+      const res = await supertest
+        .post(`/api/ingest_manager/epm/packages`)
+        .set('kbn-xsrf', 'xxxx')
+        .type('application/gzip')
+        .send(buf)
+        .expect(200);
+      expect(res.body.response.length).to.be(23);
+    });
+
+    it('should install a zip archive correctly', async function () {
+      const buf = fs.readFileSync(testPkgArchiveZip);
+      const res = await supertest
+        .post(`/api/ingest_manager/epm/packages`)
+        .set('kbn-xsrf', 'xxxx')
+        .type('application/zip')
+        .send(buf)
+        .expect(200);
+      expect(res.body.response.length).to.be(18);
+    });
+
+    it('should throw an error if the archive is zip but content type is gzip', async function () {
+      const buf = fs.readFileSync(testPkgArchiveZip);
+      const res = await supertest
+        .post(`/api/ingest_manager/epm/packages`)
+        .set('kbn-xsrf', 'xxxx')
+        .type('application/gzip')
+        .send(buf)
+        .expect(400);
+      expect(res.error.text).to.equal(
+        '{"statusCode":400,"error":"Bad Request","message":"Uploaded archive seems empty. Assumed content type was application/gzip, check if this matches the archive type."}'
+      );
+    });
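// Editor's note: the mismatch tests in this file show that extraction is driven by the declared
// Content-Type, not by sniffing the bytes. A self-contained sketch of that decision and of the
// two failure modes the tests assert: an "empty" result when gzip/tar extraction of zip bytes
// yields nothing, and a wrapped error when unzip rejects. This mirrors only the observable
// behaviour exercised by the tests; it is not the plugin's implementation.
type UploadContentType = 'application/gzip' | 'application/zip';
type ExtractToPaths = (archiveBuffer: Buffer) => Promise<string[]>;

async function extractUpload(
  archiveBuffer: Buffer,
  contentType: UploadContentType,
  extractors: Record<UploadContentType, ExtractToPaths>
): Promise<string[]> {
  let paths: string[] = [];
  try {
    paths = await extractors[contentType](archiveBuffer);
  } catch (error) {
    throw new Error(
      `Error during extraction of uploaded package: ${error}. Assumed content type was ${contentType}, check if this matches the archive type.`
    );
  }
  if (paths.length === 0) {
    throw new Error(
      `Uploaded archive seems empty. Assumed content type was ${contentType}, check if this matches the archive type.`
    );
  }
  return paths;
}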
+
+    it('should throw an error if the archive is tar.gz but content type is zip', async function () {
+      const buf = fs.readFileSync(testPkgArchiveTgz);
+      const res = await supertest
+        .post(`/api/ingest_manager/epm/packages`)
+        .set('kbn-xsrf', 'xxxx')
+        .type('application/zip')
+        .send(buf)
+        .expect(400);
+      expect(res.error.text).to.equal(
+        '{"statusCode":400,"error":"Bad Request","message":"Error during extraction of uploaded package: Error: end of central directory record signature not found. Assumed content type was application/zip, check if this matches the archive type."}'
+      );
+    });
+
+    it('should throw an error if the archive contains two top-level directories', async function () {
+      const buf = fs.readFileSync(testPkgArchiveInvalidTwoToplevels);
+      const res = await supertest
+        .post(`/api/ingest_manager/epm/packages`)
+        .set('kbn-xsrf', 'xxxx')
+        .type('application/zip')
+        .send(buf)
+        .expect(400);
+      expect(res.error.text).to.equal(
+        '{"statusCode":400,"error":"Bad Request","message":"Package contains more than one top-level directory."}'
+      );
+    });
+
+    it('should throw an error if the archive does not contain a manifest', async function () {
+      const buf = fs.readFileSync(testPkgArchiveInvalidNoManifest);
+      const res = await supertest
+        .post(`/api/ingest_manager/epm/packages`)
+        .set('kbn-xsrf', 'xxxx')
+        .type('application/zip')
+        .send(buf)
+        .expect(400);
+      expect(res.error.text).to.equal(
+        '{"statusCode":400,"error":"Bad Request","message":"Package must contain a top-level manifest.yml file."}'
+      );
+    });
+
+    it('should throw an error if the archive manifest contains invalid YAML', async function () {
+      const buf = fs.readFileSync(testPkgArchiveInvalidManifestInvalidYaml);
+      const res = await supertest
+        .post(`/api/ingest_manager/epm/packages`)
+        .set('kbn-xsrf', 'xxxx')
+        .type('application/zip')
+        .send(buf)
+        .expect(400);
+      expect(res.error.text).to.equal(
+        '{"statusCode":400,"error":"Bad Request","message":"Could not parse top-level package manifest: YAMLException: bad indentation of a mapping entry at line 2, column 7:\\n  name: apache\\n      ^."}'
+      );
+    });
+
+    it('should throw an error if the archive manifest misses a mandatory field', async function () {
+      const buf = fs.readFileSync(testPkgArchiveInvalidManifestMissingField);
+      const res = await supertest
+        .post(`/api/ingest_manager/epm/packages`)
+        .set('kbn-xsrf', 'xxxx')
+        .type('application/zip')
+        .send(buf)
+        .expect(400);
+      expect(res.error.text).to.equal(
+        '{"statusCode":400,"error":"Bad Request","message":"Invalid top-level package manifest: one or more fields missing of name, version, description, type, categories, format_version"}'
+      );
+    });
+
+    it('should throw an error if the toplevel directory name does not match the package key', async function () {
+      const buf = fs.readFileSync(testPkgArchiveInvalidToplevelMismatch);
+      const res = await supertest
+        .post(`/api/ingest_manager/epm/packages`)
+        .set('kbn-xsrf', 'xxxx')
+        .type('application/zip')
+        .send(buf)
+        .expect(400);
+      expect(res.error.text).to.equal(
+        '{"statusCode":400,"error":"Bad Request","message":"Name thisIsATypo and version 0.1.4 do not match top-level directory apache-0.1.4"}'
+      );
     });
   });
 }
diff --git a/x-pack/test/ingest_manager_api_integration/apis/epm/install_remove_assets.ts b/x-pack/test/ingest_manager_api_integration/apis/epm/install_remove_assets.ts
index d33d0445d1cd6..05fdaeaa38164 100644
--- a/x-pack/test/ingest_manager_api_integration/apis/epm/install_remove_assets.ts
+++ b/x-pack/test/ingest_manager_api_integration/apis/epm/install_remove_assets.ts
@@ -184,6 +184,7 @@ export default function (providerContext: FtrProviderContext) {
         install_version: '0.1.0',
         install_status: 'installed',
         install_started_at: res.attributes.install_started_at,
+        install_source: 'registry',
       });
     });
   });
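// Editor's note: a condensed sketch of the validation rules the invalid-archive tests above
// exercise: exactly one top-level directory, a top-level manifest.yml, the mandatory manifest
// fields, and a top-level directory named `${name}-${version}`. Function and type names are
// illustrative; only the rules and error texts mirror what the tests assert.
interface TopLevelManifest {
  name?: string;
  version?: string;
  description?: string;
  type?: string;
  categories?: string[];
  format_version?: string;
}

const mandatoryFields = [
  'name',
  'version',
  'description',
  'type',
  'categories',
  'format_version',
] as const;

function validateUploadedArchive(paths: string[], manifest?: TopLevelManifest): void {
  const toplevelDirs = new Set(paths.map((filePath) => filePath.split('/')[0]));
  if (toplevelDirs.size !== 1) {
    throw new Error('Package contains more than one top-level directory.');
  }
  const [toplevel] = toplevelDirs;
  if (!manifest || !paths.includes(`${toplevel}/manifest.yml`)) {
    throw new Error('Package must contain a top-level manifest.yml file.');
  }
  if (mandatoryFields.some((field) => manifest[field] === undefined)) {
    throw new Error(
      'Invalid top-level package manifest: one or more fields missing of name, version, description, type, categories, format_version'
    );
  }
  if (`${manifest.name}-${manifest.version}` !== toplevel) {
    throw new Error(
      `Name ${manifest.name} and version ${manifest.version} do not match top-level directory ${toplevel}`
    );
  }
}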
diff --git a/x-pack/test/ingest_manager_api_integration/apis/epm/update_assets.ts b/x-pack/test/ingest_manager_api_integration/apis/epm/update_assets.ts
index 9af27f5f98558..8608756c37f54 100644
--- a/x-pack/test/ingest_manager_api_integration/apis/epm/update_assets.ts
+++ b/x-pack/test/ingest_manager_api_integration/apis/epm/update_assets.ts
@@ -325,6 +325,7 @@ export default function (providerContext: FtrProviderContext) {
         install_version: '0.2.0',
         install_status: 'installed',
         install_started_at: res.attributes.install_started_at,
+        install_source: 'registry',
       });
     });
   });
diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_0.1.4.zip b/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_0.1.4.zip
new file mode 100644
index 0000000000000..410b00ecde2be
Binary files /dev/null and b/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_0.1.4.zip differ
diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_invalid_manifest_invalid_yaml_0.1.4.zip b/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_invalid_manifest_invalid_yaml_0.1.4.zip
new file mode 100644
index 0000000000000..e18db3c0e3df0
Binary files /dev/null and b/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_invalid_manifest_invalid_yaml_0.1.4.zip differ
diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_invalid_manifest_missing_field_0.1.4.zip b/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_invalid_manifest_missing_field_0.1.4.zip
new file mode 100644
index 0000000000000..8526f6a53458b
Binary files /dev/null and b/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_invalid_manifest_missing_field_0.1.4.zip differ
diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_invalid_no_manifest_0.1.4.zip b/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_invalid_no_manifest_0.1.4.zip
new file mode 100644
index 0000000000000..ec410421130c5
Binary files /dev/null and b/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_invalid_no_manifest_0.1.4.zip differ
diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_invalid_toplevel_mismatch_0.1.4.zip b/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_invalid_toplevel_mismatch_0.1.4.zip
new file mode 100644
index 0000000000000..18e035e5192c4
Binary files /dev/null and b/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_invalid_toplevel_mismatch_0.1.4.zip differ
diff --git a/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_invalid_two_toplevels_0.1.4.zip b/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_invalid_two_toplevels_0.1.4.zip
new file mode 100644
index 0000000000000..cfe8a809ae92b
Binary files /dev/null and b/x-pack/test/ingest_manager_api_integration/apis/fixtures/direct_upload_packages/apache_invalid_two_toplevels_0.1.4.zip differ
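// Editor's note: every upload test above repeats the same readFileSync + POST + header +
// content type + expect chain against the fixture archives listed here. A hypothetical helper
// that could factor that pattern out, typed against a minimal local interface so the sketch
// stays self-contained; in the real suite the FTR-provided supertest agent would be passed in.
// None of these names exist in the plugin or the test helpers.
import fs from 'fs';

interface UploadResult {
  body: { response?: unknown[] };
  error: { text: string } | false;
}

interface ArchiveRequest extends Promise<UploadResult> {
  set(header: string, value: string): ArchiveRequest;
  type(contentType: string): ArchiveRequest;
  send(body: Buffer): ArchiveRequest;
  expect(status: number): ArchiveRequest;
}

interface ArchiveAgent {
  post(url: string): ArchiveRequest;
}

async function uploadArchive(
  agent: ArchiveAgent,
  archivePath: string,
  contentType: 'application/gzip' | 'application/zip',
  expectedStatus: number
): Promise<UploadResult> {
  const buf = fs.readFileSync(archivePath);
  return agent
    .post('/api/ingest_manager/epm/packages')
    .set('kbn-xsrf', 'xxxx')
    .type(contentType)
    .send(buf)
    .expect(expectedStatus);
}

// e.g. const res = await uploadArchive(supertest, testPkgArchiveZip, 'application/zip', 200);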