"use strict"; var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault"); Object.defineProperty(exports, "__esModule", { value: true }); exports.createInstallation = createInstallation; exports.ensureInstalledPackage = ensureInstalledPackage; exports.ensurePackagesCompletedInstall = ensurePackagesCompletedInstall; exports.getInstallType = getInstallType; exports.handleInstallPackageFailure = handleInstallPackageFailure; exports.installAssetsForInputPackagePolicy = installAssetsForInputPackagePolicy; exports.installCustomPackage = installCustomPackage; exports.installIndexTemplatesAndPipelines = installIndexTemplatesAndPipelines; exports.installPackage = installPackage; exports.isPackageInstalled = isPackageInstalled; exports.isPackageVersionOrLaterInstalled = isPackageVersionOrLaterInstalled; exports.optimisticallyAddEsAssetReferences = void 0; exports.restartInstallation = restartInstallation; exports.updateVersion = exports.updateInstallStatus = exports.updateEsAssetReferences = exports.saveKibanaAssetsRefs = void 0; var _elasticApmNode = _interopRequireDefault(require("elastic-apm-node")); var _i18n = require("@kbn/i18n"); var _lt = _interopRequireDefault(require("semver/functions/lt")); var _server = require("@kbn/core/server"); var _constants = require("@kbn/spaces-plugin/common/constants"); var _pRetry = _interopRequireDefault(require("p-retry")); var _lodash = require("lodash"); var _services = require("../../../../common/services"); var _fleet_es_assets = require("../../../constants/fleet_es_assets"); var _template = require("../elasticsearch/template/template"); var _constants2 = require("../../../../common/constants"); var _errors = require("../../../errors"); var _constants3 = require("../../../constants"); var _ = require("../.."); var _app_context = require("../../app_context"); var Registry = _interopRequireWildcard(require("../registry")); var _archive = require("../archive"); var _install = require("../kibana/assets/install"); var _upgrade_sender = require("../../upgrade_sender"); var _ingest_pipeline = require("../elasticsearch/ingest_pipeline"); var _install2 = require("../elasticsearch/template/install"); var _audit_logging = require("../../audit_logging"); var _filtered_packages = require("../filtered_packages"); var _package_verification = require("./package_verification"); var _2 = require("."); var _remove = require("./remove"); var _get = require("./get"); var _install_package = require("./_install_package"); var _cleanup = require("./cleanup"); var _bundled_packages = require("./bundled_packages"); var _utils = require("./utils"); var _utils2 = require("./custom_integrations/utils"); var _constants4 = require("./custom_integrations/constants"); var _custom_integrations = require("./custom_integrations"); var _cache = require("./custom_integrations/assets/cache"); var _utils3 = require("./custom_integrations/assets/dataset/utils"); var _check_naming_collision = require("./custom_integrations/validation/check_naming_collision"); function _getRequireWildcardCache(nodeInterop) { if (typeof WeakMap !== "function") return null; var cacheBabelInterop = new WeakMap(); var cacheNodeInterop = new WeakMap(); return (_getRequireWildcardCache = function (nodeInterop) { return nodeInterop ? 
cacheNodeInterop : cacheBabelInterop; })(nodeInterop); } function _interopRequireWildcard(obj, nodeInterop) { if (!nodeInterop && obj && obj.__esModule) { return obj; } if (obj === null || typeof obj !== "object" && typeof obj !== "function") { return { default: obj }; } var cache = _getRequireWildcardCache(nodeInterop); if (cache && cache.has(obj)) { return cache.get(obj); } var newObj = {}; var hasPropertyDescriptor = Object.defineProperty && Object.getOwnPropertyDescriptor; for (var key in obj) { if (key !== "default" && Object.prototype.hasOwnProperty.call(obj, key)) { var desc = hasPropertyDescriptor ? Object.getOwnPropertyDescriptor(obj, key) : null; if (desc && (desc.get || desc.set)) { Object.defineProperty(newObj, key, desc); } else { newObj[key] = obj[key]; } } } newObj.default = obj; if (cache) { cache.set(obj, newObj); } return newObj; } /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ async function isPackageInstalled(options) { const installedPackage = await (0, _2.getInstallation)(options); return installedPackage !== undefined; } async function isPackageVersionOrLaterInstalled(options) { const { savedObjectsClient, pkgName, pkgVersion } = options; const installedPackageObject = await (0, _2.getInstallationObject)({ savedObjectsClient, pkgName }); const installedPackage = installedPackageObject === null || installedPackageObject === void 0 ? void 0 : installedPackageObject.attributes; if (installedPackage && (installedPackage.version === pkgVersion || (0, _lt.default)(pkgVersion, installedPackage.version))) { let installType; try { installType = getInstallType({ pkgVersion, installedPkg: installedPackageObject }); } catch (e) { installType = 'unknown'; } return { package: installedPackage, installType }; } return false; } async function ensureInstalledPackage(options) { const { savedObjectsClient, pkgName, esClient, pkgVersion, force = false, spaceId = _constants.DEFAULT_SPACE_ID, authorizationHeader } = options; // If pkgVersion isn't specified, find the latest package version const pkgKeyProps = pkgVersion ? { name: pkgName, version: pkgVersion } : await Registry.fetchFindLatestPackageOrThrow(pkgName, { prerelease: true }); const installedPackageResult = await isPackageVersionOrLaterInstalled({ savedObjectsClient, pkgName: pkgKeyProps.name, pkgVersion: pkgKeyProps.version }); if (installedPackageResult) { return installedPackageResult.package; } const pkgkey = Registry.pkgToPkgKey(pkgKeyProps); const installResult = await installPackage({ installSource: 'registry', savedObjectsClient, pkgkey, spaceId, esClient, neverIgnoreVerificationError: !force, force: true, // Always force outdated packages to be installed if a later version isn't installed authorizationHeader }); if (installResult.error) { const errorPrefix = installResult.installType === 'update' || installResult.installType === 'reupdate' ? 
_i18n.i18n.translate('xpack.fleet.epm.install.packageUpdateError', { defaultMessage: 'Error updating {pkgName} to {pkgVersion}', values: { pkgName: pkgKeyProps.name, pkgVersion: pkgKeyProps.version } }) : _i18n.i18n.translate('xpack.fleet.epm.install.packageInstallError', { defaultMessage: 'Error installing {pkgName} {pkgVersion}', values: { pkgName: pkgKeyProps.name, pkgVersion: pkgKeyProps.version } }); installResult.error.message = `${errorPrefix}: ${installResult.error.message}`; throw installResult.error; } const installation = await (0, _2.getInstallation)({ savedObjectsClient, pkgName }); if (!installation) throw new Error(`could not get installation ${pkgName}`); return installation; } async function handleInstallPackageFailure({ savedObjectsClient, error, pkgName, pkgVersion, installedPkg, esClient, spaceId, authorizationHeader }) { if (error instanceof _errors.ConcurrentInstallOperationError) { return; } const logger = _app_context.appContextService.getLogger(); const pkgkey = Registry.pkgToPkgKey({ name: pkgName, version: pkgVersion }); // if there is an unknown server error, uninstall any package assets or reinstall the previous version if update try { const installType = getInstallType({ pkgVersion, installedPkg }); if (installType === 'install' || installType === 'reinstall') { logger.error(`uninstalling ${pkgkey} after error installing: [${error.toString()}]`); await (0, _remove.removeInstallation)({ savedObjectsClient, pkgName, pkgVersion, esClient }); return; } await updateInstallStatus({ savedObjectsClient, pkgName, status: 'install_failed' }).catch(err => { if (!_server.SavedObjectsErrorHelpers.isNotFoundError(err)) { logger.error(`failed to update package status to: install_failed ${err}`); } }); if (installType === 'update') { if (!installedPkg) { logger.error(`failed to rollback package after installation error ${error} because saved object was undefined`); return; } const prevVersion = `${pkgName}-${installedPkg.attributes.version}`; logger.error(`rolling back to ${prevVersion} after error installing ${pkgkey}`); await installPackage({ installSource: 'registry', savedObjectsClient, pkgkey: prevVersion, esClient, spaceId, force: true, authorizationHeader }); } } catch (e) { // If an error happens while removing the integration or while doing a rollback update the status to failed await updateInstallStatus({ savedObjectsClient, pkgName, status: 'install_failed' }).catch(err => { if (!_server.SavedObjectsErrorHelpers.isNotFoundError(err)) { logger.error(`failed to update package status to: install_failed ${err}`); } }); logger.error(`failed to uninstall or rollback package after installation error ${e}`); } } function getTelemetryEvent(pkgName, pkgVersion) { return { packageName: pkgName, currentVersion: 'unknown', newVersion: pkgVersion, status: 'failure', dryRun: false, eventType: _upgrade_sender.UpdateEventType.PACKAGE_INSTALL, installType: 'unknown' }; } function sendEvent(telemetryEvent) { (0, _upgrade_sender.sendTelemetryEvents)(_app_context.appContextService.getLogger(), _app_context.appContextService.getTelemetryEventsSender(), telemetryEvent); } async function installPackageFromRegistry({ savedObjectsClient, pkgkey, esClient, spaceId, authorizationHeader, force = false, ignoreConstraints = false, neverIgnoreVerificationError = false, prerelease = false }) { const logger = _app_context.appContextService.getLogger(); // TODO: change epm API to /packageName/version so we don't need to do this const { pkgName, pkgVersion: version } = Registry.splitPkgKey(pkgkey); 
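// For reference: Registry.splitPkgKey is assumed here to split a package key of the form `<name>-<version>`
// (for example a hypothetical 'nginx-1.5.0' key) into its name and version parts; the version may be empty
// when only a package name was passed, in which case the latest available version is looked up just below.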
let pkgVersion = version !== null && version !== void 0 ? version : ''; // if an error happens during getInstallType, report that we don't know let installType = 'unknown'; const installSource = 'registry'; const telemetryEvent = getTelemetryEvent(pkgName, pkgVersion); try { // get the currently installed package const installedPkg = await (0, _2.getInstallationObject)({ savedObjectsClient, pkgName }); installType = getInstallType({ pkgVersion, installedPkg }); telemetryEvent.installType = installType; telemetryEvent.currentVersion = (installedPkg === null || installedPkg === void 0 ? void 0 : installedPkg.attributes.version) || 'not_installed'; const queryLatest = () => Registry.fetchFindLatestPackageOrThrow(pkgName, { ignoreConstraints, prerelease: prerelease === true || (0, _services.isPackagePrerelease)(pkgVersion) // fetching latest GA version if the package to install is GA, so that it is allowed to install }); let latestPkg; // fetching latest package first to set the version if (!pkgVersion) { latestPkg = await queryLatest(); pkgVersion = latestPkg.version; } // get latest package version and requested version in parallel for performance const [latestPackage, { paths, packageInfo, verificationResult }] = await Promise.all([latestPkg ? Promise.resolve(latestPkg) : queryLatest(), Registry.getPackage(pkgName, pkgVersion, { ignoreUnverified: force && !neverIgnoreVerificationError })]); // let the user install if using the force flag or needing to reinstall or install a previous version due to failed update const installOutOfDateVersionOk = force || ['reinstall', 'reupdate', 'rollback'].includes(installType); // if the requested version is out-of-date of the latest package version, check if we allow it // if we don't allow it, return an error if ((0, _lt.default)(pkgVersion, latestPackage.version)) { if (!installOutOfDateVersionOk) { throw new _errors.PackageOutdatedError(`${pkgkey} is out-of-date and cannot be installed or updated`); } logger.debug(`${pkgkey} is out-of-date, installing anyway due to ${force ? 'force flag' : `install type ${installType}`}`); } return await installPackageCommon({ pkgName, pkgVersion, installSource, installedPkg, installType, savedObjectsClient, esClient, spaceId, force, packageInfo, paths, verificationResult, authorizationHeader }); } catch (e) { sendEvent({ ...telemetryEvent, errorMessage: e.message }); return { error: e, installType, installSource }; } } function getElasticSubscription(packageInfo) { var _packageInfo$conditio, _packageInfo$conditio2; const subscription = (_packageInfo$conditio = packageInfo.conditions) === null || _packageInfo$conditio === void 0 ? void 0 : (_packageInfo$conditio2 = _packageInfo$conditio.elastic) === null || _packageInfo$conditio2 === void 0 ? 
void 0 : _packageInfo$conditio2.subscription; // Keep packageInfo.license for backward compatibility return subscription || packageInfo.license || 'basic'; } async function installPackageCommon(options) { const { pkgName, pkgVersion, installSource, installedPkg, installType, savedObjectsClient, force, esClient, spaceId, packageInfo, paths, verificationResult, authorizationHeader } = options; let { telemetryEvent } = options; const logger = _app_context.appContextService.getLogger(); // Workaround apm issue with async spans: https://github.com/elastic/apm-agent-nodejs/issues/2611 await Promise.resolve(); const span = _elasticApmNode.default.startSpan(`Install package from ${installSource} ${pkgName}@${pkgVersion}`, 'package'); if (!telemetryEvent) { telemetryEvent = getTelemetryEvent(pkgName, pkgVersion); telemetryEvent.installType = installType; telemetryEvent.currentVersion = (installedPkg === null || installedPkg === void 0 ? void 0 : installedPkg.attributes.version) || 'not_installed'; } try { span === null || span === void 0 ? void 0 : span.addLabels({ packageName: pkgName, packageVersion: pkgVersion, installType }); const filteredPackages = (0, _filtered_packages.getFilteredInstallPackages)(); if (filteredPackages.includes(pkgName)) { throw new _errors.FleetUnauthorizedError(`${pkgName} installation is not authorized`); } // if the requested version is the same as installed version, check if we allow it based on // current installed package status and force flag, if we don't allow it, // just return the asset references from the existing installation if ((installedPkg === null || installedPkg === void 0 ? void 0 : installedPkg.attributes.version) === pkgVersion && (installedPkg === null || installedPkg === void 0 ? void 0 : installedPkg.attributes.install_status) === 'installed') { if (!force) { logger.debug(`${pkgName}-${pkgVersion} is already installed, skipping installation`); return { assets: [...installedPkg.attributes.installed_es, ...installedPkg.attributes.installed_kibana], status: 'already_installed', installType, installSource }; } } const elasticSubscription = getElasticSubscription(packageInfo); if (!_.licenseService.hasAtLeast(elasticSubscription)) { const err = new Error(`Requires ${elasticSubscription} license`); sendEvent({ ...telemetryEvent, errorMessage: err.message }); return { error: err, installType, installSource }; } const savedObjectsImporter = _app_context.appContextService.getSavedObjects().createImporter(savedObjectsClient, { importSizeLimit: 15_000 }); const savedObjectTagAssignmentService = _app_context.appContextService.getSavedObjectsTagging().createInternalAssignmentService({ client: savedObjectsClient }); const savedObjectTagClient = _app_context.appContextService.getSavedObjectsTagging().createTagClient({ client: savedObjectsClient }); // try installing the package, if there was an error, call error handler and rethrow // @ts-expect-error status is string instead of InstallResult.status 'installed' | 'already_installed' return await (0, _install_package._installPackage)({ savedObjectsClient, savedObjectsImporter, savedObjectTagAssignmentService, savedObjectTagClient, esClient, logger, installedPkg, paths, packageInfo, installType, spaceId, verificationResult, installSource, authorizationHeader, force }).then(async assets => { await (0, _cleanup.removeOldAssets)({ soClient: savedObjectsClient, pkgName: packageInfo.name, currentVersion: packageInfo.version }); sendEvent({ ...telemetryEvent, status: 'success' }); return { assets, status: 'installed', 
installType, installSource }; }).catch(async err => { logger.warn(`Failure to install package [${pkgName}]: [${err.toString()}]`); await handleInstallPackageFailure({ savedObjectsClient, error: err, pkgName, pkgVersion, installedPkg, spaceId, esClient, authorizationHeader }); sendEvent({ ...telemetryEvent, errorMessage: err.message }); return { error: err, installType, installSource }; }); } catch (e) { sendEvent({ ...telemetryEvent, errorMessage: e.message }); return { error: e, installType, installSource }; } finally { span === null || span === void 0 ? void 0 : span.end(); } } async function installPackageByUpload({ savedObjectsClient, esClient, archiveBuffer, contentType, spaceId, version, authorizationHeader }) { // if an error happens during getInstallType, report that we don't know let installType = 'unknown'; const installSource = 'upload'; try { const { packageInfo } = await (0, _archive.generatePackageInfoFromArchiveBuffer)(archiveBuffer, contentType); const pkgName = packageInfo.name; // Allow for overriding the version in the manifest for cases where we install // stack-aligned bundled packages to support special cases around the // `forceAlignStackVersion` flag in `fleet_packages.json`. const pkgVersion = version || packageInfo.version; const installedPkg = await (0, _2.getInstallationObject)({ savedObjectsClient, pkgName }); installType = getInstallType({ pkgVersion, installedPkg }); // as we do not verify uploaded packages, we must invalidate the verification cache (0, _archive.deleteVerificationResult)(packageInfo); const paths = await (0, _archive.unpackBufferToCache)({ name: packageInfo.name, version: pkgVersion, archiveBuffer, contentType }); (0, _archive.setPackageInfo)({ name: packageInfo.name, version: pkgVersion, packageInfo }); return await installPackageCommon({ pkgName, pkgVersion, installSource, installedPkg, installType, savedObjectsClient, esClient, spaceId, force: true, // upload has implicit force packageInfo, paths, authorizationHeader }); } catch (e) { return { error: e, installType, installSource }; } } async function installPackage(args) { if (!('installSource' in args)) { throw new Error('installSource is required'); } const logger = _app_context.appContextService.getLogger(); const { savedObjectsClient, esClient } = args; const authorizationHeader = args.authorizationHeader; const bundledPackages = await (0, _bundled_packages.getBundledPackages)(); if (args.installSource === 'registry') { const { pkgkey, force, ignoreConstraints, spaceId, neverIgnoreVerificationError, prerelease } = args; const matchingBundledPackage = bundledPackages.find(pkg => Registry.pkgToPkgKey(pkg) === pkgkey); if (matchingBundledPackage) { logger.debug(`found bundled package for requested install of ${pkgkey} - installing from bundled package archive`); const response = await installPackageByUpload({ savedObjectsClient, esClient, archiveBuffer: matchingBundledPackage.buffer, contentType: 'application/zip', spaceId, version: matchingBundledPackage.version, authorizationHeader }); return { ...response, installSource: 'bundled' }; } logger.debug(`kicking off install of ${pkgkey} from registry`); const response = await installPackageFromRegistry({ savedObjectsClient, pkgkey, esClient, spaceId, force, neverIgnoreVerificationError, ignoreConstraints, prerelease, authorizationHeader }); return response; } else if (args.installSource === 'upload') { const { archiveBuffer, contentType, spaceId } = args; const response = await installPackageByUpload({ savedObjectsClient, esClient, 
archiveBuffer, contentType, spaceId, authorizationHeader }); return response; } else if (args.installSource === 'custom') { const { pkgName, force, datasets, spaceId, kibanaVersion } = args; const response = await installCustomPackage({ savedObjectsClient, pkgName, datasets, esClient, spaceId, force, authorizationHeader, kibanaVersion }); return response; } throw new Error(`Unknown installSource: ${args.installSource}`); } async function installCustomPackage(args) { var _authorizationHeader$; const { savedObjectsClient, esClient, spaceId, pkgName, force, authorizationHeader, datasets, kibanaVersion } = args; // Validate that we can create this package, validations will throw if they don't pass await (0, _check_naming_collision.checkForNamingCollision)(savedObjectsClient, pkgName); // Compose a packageInfo const packageInfo = { format_version: _constants2.CUSTOM_INTEGRATION_PACKAGE_SPEC_VERSION, name: pkgName, title: (0, _utils2.convertStringToTitle)(pkgName), description: (0, _utils2.generateDescription)(datasets.map(dataset => dataset.name)), version: _constants4.INITIAL_VERSION, owner: { github: (_authorizationHeader$ = authorizationHeader === null || authorizationHeader === void 0 ? void 0 : authorizationHeader.username) !== null && _authorizationHeader$ !== void 0 ? _authorizationHeader$ : 'unknown' }, type: 'integration', data_streams: (0, _utils3.generateDatastreamEntries)(datasets, pkgName) }; const assets = (0, _custom_integrations.createAssets)({ ...packageInfo, kibanaVersion, datasets }); const paths = (0, _cache.cacheAssets)(assets, pkgName, _constants4.INITIAL_VERSION); return await installPackageCommon({ pkgName, pkgVersion: _constants4.INITIAL_VERSION, installSource: 'custom', installType: 'install', savedObjectsClient, esClient, spaceId, force, packageInfo, paths, authorizationHeader }); } const updateVersion = async (savedObjectsClient, pkgName, pkgVersion) => { _audit_logging.auditLoggingService.writeCustomSoAuditLog({ action: 'update', id: pkgName, savedObjectType: _constants3.PACKAGES_SAVED_OBJECT_TYPE }); return savedObjectsClient.update(_constants3.PACKAGES_SAVED_OBJECT_TYPE, pkgName, { version: pkgVersion }); }; exports.updateVersion = updateVersion; const updateInstallStatus = async ({ savedObjectsClient, pkgName, status }) => { _audit_logging.auditLoggingService.writeCustomSoAuditLog({ action: 'update', id: pkgName, savedObjectType: _constants3.PACKAGES_SAVED_OBJECT_TYPE }); return savedObjectsClient.update(_constants3.PACKAGES_SAVED_OBJECT_TYPE, pkgName, { install_status: status }); }; exports.updateInstallStatus = updateInstallStatus; async function restartInstallation(options) { const { savedObjectsClient, pkgVersion, pkgName, installSource, verificationResult } = options; let savedObjectUpdate = { install_version: pkgVersion, install_status: 'installing', install_started_at: new Date().toISOString(), install_source: installSource }; if (verificationResult) { savedObjectUpdate = { ...savedObjectUpdate, verification_key_id: null, // unset any previous verification key id ...(0, _package_verification.formatVerificationResultForSO)(verificationResult) }; } _audit_logging.auditLoggingService.writeCustomSoAuditLog({ action: 'update', id: pkgName, savedObjectType: _constants3.PACKAGES_SAVED_OBJECT_TYPE }); await savedObjectsClient.update(_constants3.PACKAGES_SAVED_OBJECT_TYPE, pkgName, savedObjectUpdate); } async function createInstallation(options) { const { savedObjectsClient, packageInfo, installSource, verificationResult } = options; const { name: pkgName, version: 
pkgVersion } = packageInfo;
  const toSaveESIndexPatterns = (0, _template.generateESIndexPatterns)(packageInfo.data_streams);
  // For "stack-aligned" packages, default the `keep_policies_up_to_date` setting to true. For all other
  // packages, default it to undefined. Use undefined rather than false to allow us to differentiate
  // between "unset" and "user explicitly disabled".
  const defaultKeepPoliciesUpToDate = _constants2.AUTO_UPGRADE_POLICIES_PACKAGES.some(({ name }) => name === packageInfo.name) ? true : undefined;
  let savedObject = { installed_kibana: [], installed_kibana_space_id: options.spaceId, installed_es: [], package_assets: [], es_index_patterns: toSaveESIndexPatterns, name: pkgName, version: pkgVersion, install_version: pkgVersion, install_status: 'installing', install_started_at: new Date().toISOString(), install_source: installSource, install_format_schema_version: _fleet_es_assets.FLEET_INSTALL_FORMAT_VERSION, keep_policies_up_to_date: defaultKeepPoliciesUpToDate, verification_status: 'unknown' };
  if (verificationResult) { savedObject = { ...savedObject, ...(0, _package_verification.formatVerificationResultForSO)(verificationResult) }; }
  _audit_logging.auditLoggingService.writeCustomSoAuditLog({ action: 'create', id: pkgName, savedObjectType: _constants3.PACKAGES_SAVED_OBJECT_TYPE });
  const created = await savedObjectsClient.create(_constants3.PACKAGES_SAVED_OBJECT_TYPE, savedObject, { id: pkgName, overwrite: true });
  return created;
}
const saveKibanaAssetsRefs = async (savedObjectsClient, pkgName, kibanaAssets) => {
  _audit_logging.auditLoggingService.writeCustomSoAuditLog({ action: 'update', id: pkgName, savedObjectType: _constants3.PACKAGES_SAVED_OBJECT_TYPE });
  const assetRefs = Object.values(kibanaAssets).flat().map(_install.toAssetReference);
  // Because Kibana assets are installed in parallel with ES assets with refresh: false, we almost always run into an
  // issue that causes a conflict error due to this issue: https://github.com/elastic/kibana/issues/126240. This is safe
  // to retry constantly until it succeeds to optimize this critical user journey path as much as possible.
  (0, _pRetry.default)(() => savedObjectsClient.update(_constants3.PACKAGES_SAVED_OBJECT_TYPE, pkgName, { installed_kibana: assetRefs }, { refresh: false }), { retries: 20 } // Use a number of retries higher than the number of es asset update operations
  );
  return assetRefs;
};
/**
 * Utility function for updating the installed_es field of a package
 */
exports.saveKibanaAssetsRefs = saveKibanaAssetsRefs;
const updateEsAssetReferences = async (savedObjectsClient, pkgName, currentAssets, { assetsToAdd = [], assetsToRemove = [], refresh = false }) => {
  const withAssetsRemoved = currentAssets.filter(({ type, id }) => { if (assetsToRemove.some(({ type: removeType, id: removeId }) => removeType === type && removeId === id)) { return false; } return true; });
  const deduplicatedAssets = (0, _lodash.uniqBy)([...withAssetsRemoved, ...assetsToAdd], ({ type, id }) => `${type}-${id}`);
  _audit_logging.auditLoggingService.writeCustomSoAuditLog({ action: 'update', id: pkgName, savedObjectType: _constants3.PACKAGES_SAVED_OBJECT_TYPE });
  const { attributes: { installed_es: updatedAssets } } =
  // Because Kibana assets are installed in parallel with ES assets with refresh: false, we almost always run into an
  // issue that causes a conflict error due to this issue: https://github.com/elastic/kibana/issues/126240.
  // This is safe to retry constantly until it succeeds to optimize this critical user journey path as much as possible.
  await (0, _pRetry.default)(() => savedObjectsClient.update(_constants3.PACKAGES_SAVED_OBJECT_TYPE, pkgName, { installed_es: deduplicatedAssets }, { refresh }),
  // Use a lower number of retries for ES assets since they're installed in serial and can only conflict with
  // the single Kibana update call.
  { retries: 5 });
  return updatedAssets !== null && updatedAssets !== void 0 ? updatedAssets : [];
};
/**
 * Utility function for adding assets to the installed_es field of a package
 * uses optimistic concurrency control to prevent missed updates
 */
exports.updateEsAssetReferences = updateEsAssetReferences;
const optimisticallyAddEsAssetReferences = async (savedObjectsClient, pkgName, assetsToAdd) => {
  const addEsAssets = async () => {
    var _so$attributes$instal;
    // TODO: Should this be replaced by a `get()` call from epm/get.ts?
    const so = await savedObjectsClient.get(_constants3.PACKAGES_SAVED_OBJECT_TYPE, pkgName);
    _audit_logging.auditLoggingService.writeCustomSoAuditLog({ action: 'get', id: pkgName, savedObjectType: _constants3.PACKAGES_SAVED_OBJECT_TYPE });
    const installedEs = (_so$attributes$instal = so.attributes.installed_es) !== null && _so$attributes$instal !== void 0 ? _so$attributes$instal : [];
    const deduplicatedAssets = (0, _lodash.uniqBy)([...installedEs, ...assetsToAdd], ({ type, id }) => `${type}-${id}`);
    _audit_logging.auditLoggingService.writeCustomSoAuditLog({ action: 'update', id: pkgName, savedObjectType: _constants3.PACKAGES_SAVED_OBJECT_TYPE });
    const { attributes: { installed_es: updatedAssets } } = await savedObjectsClient.update(_constants3.PACKAGES_SAVED_OBJECT_TYPE, pkgName, { installed_es: deduplicatedAssets }, { version: so.version });
    return updatedAssets !== null && updatedAssets !== void 0 ? updatedAssets : [];
  };
  const onlyRetryConflictErrors = err => { if (!_server.SavedObjectsErrorHelpers.isConflictError(err)) { throw err; } };
  return (0, _pRetry.default)(addEsAssets, { retries: 10, onFailedAttempt: onlyRetryConflictErrors });
};
exports.optimisticallyAddEsAssetReferences = optimisticallyAddEsAssetReferences;
async function ensurePackagesCompletedInstall(savedObjectsClient, esClient) {
  const installingPackages = await (0, _get.getPackageSavedObjects)(savedObjectsClient, { searchFields: ['install_status'], search: 'installing' });
  const installingPromises = installingPackages.saved_objects.reduce((acc, pkg) => {
    const startDate = pkg.attributes.install_started_at;
    const nowDate = new Date().toISOString();
    const elapsedTime = Date.parse(nowDate) - Date.parse(startDate);
    const pkgkey = `${pkg.attributes.name}-${pkg.attributes.install_version}`;
    // reinstall package
    if (elapsedTime > _constants3.MAX_TIME_COMPLETE_INSTALL) { acc.push(installPackage({ installSource: 'registry', savedObjectsClient, pkgkey, esClient, spaceId: pkg.attributes.installed_kibana_space_id || _constants.DEFAULT_SPACE_ID })); }
    return acc;
  }, []);
  await Promise.all(installingPromises);
  return installingPackages;
}
async function installIndexTemplatesAndPipelines({ installedPkg, paths, packageInfo, esReferences, savedObjectsClient, esClient, logger, onlyForDataStreams }) {
  var _installedPkg$experim;
  /**
   * In order to install assets in parallel, we need to split the preparation step from the installation step. This
   * allows us to know which asset references are going to be installed so that we can save them on the packages
   * SO before installation begins.
   * In the case of a failure during installing any individual asset, we'll have the
   * references necessary to remove any assets that were successfully installed during the rollback phase.
   *
   * This split of prepare/install could be extended to all asset types. Besides performance, it also allows us to
   * more easily write unit tests against the asset generation code without needing to mock ES responses.
   */
  const experimentalDataStreamFeatures = (_installedPkg$experim = installedPkg === null || installedPkg === void 0 ? void 0 : installedPkg.experimental_data_stream_features) !== null && _installedPkg$experim !== void 0 ? _installedPkg$experim : [];
  const preparedIngestPipelines = (0, _ingest_pipeline.prepareToInstallPipelines)(packageInfo, paths, onlyForDataStreams);
  const preparedIndexTemplates = (0, _install2.prepareToInstallTemplates)(packageInfo, paths, esReferences, experimentalDataStreamFeatures, onlyForDataStreams);
  // Update the references for the templates and ingest pipelines together. Need to be done together to avoid race
  // conditions on updating the installed_es field at the same time
  // These must be saved before we actually attempt to install the templates or pipelines so that we know what to
  // cleanup in the case that a single asset fails to install.
  let newEsReferences = [];
  if (onlyForDataStreams) {
    // if onlyForDataStreams is present that means we are in create package policy flow
    // not install flow, meaning we do not have a lock on the installation SO
    // so we need to use optimistic concurrency control
    newEsReferences = await optimisticallyAddEsAssetReferences(savedObjectsClient, packageInfo.name, [...preparedIngestPipelines.assetsToAdd, ...preparedIndexTemplates.assetsToAdd]);
  } else {
    newEsReferences = await updateEsAssetReferences(savedObjectsClient, packageInfo.name, esReferences, { assetsToRemove: preparedIndexTemplates.assetsToRemove, assetsToAdd: [...preparedIngestPipelines.assetsToAdd, ...preparedIndexTemplates.assetsToAdd] });
  }
  // Install index templates and ingest pipelines in parallel since they typically take the longest
  const [installedTemplates] = await Promise.all([(0, _utils.withPackageSpan)('Install index templates', () => preparedIndexTemplates.install(esClient, logger)),
  // installs versionized pipelines without removing currently installed ones
  (0, _utils.withPackageSpan)('Install ingest pipelines', () => preparedIngestPipelines.install(esClient, logger))]);
  return { esReferences: newEsReferences, installedTemplates };
}
async function installAssetsForInputPackagePolicy(opts) {
  var _packagePolicy$inputs, _packagePolicy$inputs2;
  const { pkgInfo, logger, packagePolicy, esClient, soClient, force } = opts;
  if (pkgInfo.type !== 'input') return;
  const paths = await (0, _archive.getArchiveFilelist)(pkgInfo);
  if (!paths) throw new Error(`No paths found for ${pkgInfo.name}`);
  const datasetName = (_packagePolicy$inputs = packagePolicy.inputs[0].streams[0].vars) === null || _packagePolicy$inputs === void 0 ? void 0 : (_packagePolicy$inputs2 = _packagePolicy$inputs[_constants2.DATASET_VAR_NAME]) === null || _packagePolicy$inputs2 === void 0 ?
  void 0 : _packagePolicy$inputs2.value;
  const [dataStream] = (0, _services.getNormalizedDataStreams)(pkgInfo, datasetName);
  const existingDataStreams = await _.dataStreamService.getMatchingDataStreams(esClient, { type: dataStream.type, dataset: datasetName });
  if (existingDataStreams.length) {
    const existingDataStreamsAreFromDifferentPackage = existingDataStreams.some(ds => { var _ds$_meta, _ds$_meta$package; return ((_ds$_meta = ds._meta) === null || _ds$_meta === void 0 ? void 0 : (_ds$_meta$package = _ds$_meta.package) === null || _ds$_meta$package === void 0 ? void 0 : _ds$_meta$package.name) !== pkgInfo.name; });
    if (existingDataStreamsAreFromDifferentPackage && !force) {
      // user has opted to send data to an existing data stream which is managed by another
      // package. This means certain custom settings such as elasticsearch settings
      // defined by the package will not have been applied which could lead
      // to unforeseen circumstances, so force flag must be used.
      const streamIndexPattern = _.dataStreamService.streamPartsToIndexPattern({ type: dataStream.type, dataset: datasetName });
      throw new _errors.PackagePolicyValidationError(`Datastreams matching "${streamIndexPattern}" already exist and are not managed by this package, force flag is required`);
    } else {
      logger.info(`Data stream ${dataStream.name} already exists, skipping index template creation for ${packagePolicy.id}`);
      return;
    }
  }
  const existingIndexTemplate = await _.dataStreamService.getMatchingIndexTemplate(esClient, { type: dataStream.type, dataset: datasetName });
  if (existingIndexTemplate) {
    var _existingIndexTemplat, _existingIndexTemplat2;
    const indexTemplateOwnedByDifferentPackage = ((_existingIndexTemplat = existingIndexTemplate._meta) === null || _existingIndexTemplat === void 0 ? void 0 : (_existingIndexTemplat2 = _existingIndexTemplat.package) === null || _existingIndexTemplat2 === void 0 ? void 0 : _existingIndexTemplat2.name) !== pkgInfo.name;
    if (indexTemplateOwnedByDifferentPackage && !force) {
      // index template already exists but there is no data stream yet
      // we do not want to override the index template
      throw new _errors.PackagePolicyValidationError(`Index template "${dataStream.type}-${datasetName}" already exists and is not managed by this package, force flag is required`);
    } else {
      logger.info(`Index template "${dataStream.type}-${datasetName}" already exists, skipping index template creation for ${packagePolicy.id}`);
      return;
    }
  }
  const installedPkg = await (0, _2.getInstallation)({ savedObjectsClient: soClient, pkgName: pkgInfo.name, logger });
  if (!installedPkg) throw new Error('Unable to find installed package while creating index templates');
  await installIndexTemplatesAndPipelines({ installedPkg, paths, packageInfo: pkgInfo, esReferences: installedPkg.installed_es || [], savedObjectsClient: soClient, esClient, logger, onlyForDataStreams: [dataStream] });
}
// implementation
function getInstallType(args) {
  const { pkgVersion, installedPkg } = args;
  if (!installedPkg) return 'install';
  const currentPkgVersion = installedPkg.attributes.version;
  const lastStartedInstallVersion = installedPkg.attributes.install_version;
  if (pkgVersion === currentPkgVersion && pkgVersion !== lastStartedInstallVersion) return 'rollback';
  if (pkgVersion === currentPkgVersion) return 'reinstall';
  if (pkgVersion === lastStartedInstallVersion && pkgVersion !== currentPkgVersion) return 'reupdate';
  if (pkgVersion !== lastStartedInstallVersion && pkgVersion !== currentPkgVersion) return 'update';
  throw new Error('unknown install type');
}
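/*
 * Illustrative usage sketch (kept as a comment so module behavior is unchanged): one plausible way a caller
 * with a Kibana request context could drive installPackage above. The `savedObjectsClient`, `esClient` and
 * `logger` objects, the 'nginx-1.5.0' package key and the 'default' space id are assumptions for the example
 * only, not values defined in this file.
 *
 *   const result = await installPackage({
 *     installSource: 'registry',
 *     savedObjectsClient,
 *     esClient,
 *     pkgkey: 'nginx-1.5.0',
 *     spaceId: 'default',
 *     force: false,
 *   });
 *   if (result.error) {
 *     // getInstallType above classifies the attempt as 'install', 'reinstall', 'update', 'reupdate' or
 *     // 'rollback' from how the requested version compares to the installed and last-started versions.
 *     logger.error(`Install failed (${result.installType}): ${result.error.message}`);
 *   } else {
 *     logger.info(`Install finished with status ${result.status}`);
 *   }
 */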