"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.query = void 0; var _runtime_types = require("../../../common/runtime_types"); var _constants = require("../../../common/constants"); var _types = require("./types"); var _constants2 = require("./constants"); var _create_aggregations = require("./lib/create_aggregations"); var _convert_buckets_to_metrics_series = require("./lib/convert_buckets_to_metrics_series"); var _calculate_bucket_size = require("./lib/calculate_bucket_size"); var _calculate_interval = require("./lib/calculate_interval"); /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ const DEFAULT_LIMIT = 9; const query = async (search, rawOptions) => { var _options$groupBy$map, _options$groupBy, _options$filters; const interval = await (0, _calculate_interval.calculatedInterval)(search, rawOptions); const options = { ...rawOptions, timerange: { ...rawOptions.timerange, interval } }; const hasGroupBy = Array.isArray(options.groupBy) && options.groupBy.length > 0; const filter = [{ range: { [options.timerange.timeFieldName || _constants.TIMESTAMP_FIELD]: { gte: options.timerange.from, lte: options.timerange.to, format: 'epoch_millis' } } }, ...((_options$groupBy$map = (_options$groupBy = options.groupBy) === null || _options$groupBy === void 0 ? void 0 : _options$groupBy.map(field => ({ exists: { field } }))) !== null && _options$groupBy$map !== void 0 ? _options$groupBy$map : [])]; const params = { allow_no_indices: true, ignore_unavailable: true, index: options.indexPattern, body: { size: 0, query: { bool: { filter: [...filter, ...((_options$filters = options.filters) !== null && _options$filters !== void 0 ? _options$filters : [])] } }, aggs: hasGroupBy ? (0, _create_aggregations.createCompositeAggregations)(options) : (0, _create_aggregations.createAggregations)(options) } }; try { const response = await search(params); if (response.hits.total.value === 0) { return _constants2.EMPTY_RESPONSE; } if (!response.aggregations) { throw new Error('Aggregations should be present.'); } const { bucketSize } = (0, _calculate_bucket_size.calculateBucketSize)({ ...options.timerange, interval }); if (hasGroupBy) { var _options$limit; const aggregations = (0, _runtime_types.decodeOrThrow)(_types.CompositeResponseRT)(response.aggregations); const { groupings } = aggregations; const limit = (_options$limit = options.limit) !== null && _options$limit !== void 0 ? _options$limit : DEFAULT_LIMIT; const returnAfterKey = !!groupings.after_key && groupings.buckets.length === limit; const afterKey = returnAfterKey ? groupings.after_key : null; return { series: getSeriesFromCompositeAggregations(groupings, options, bucketSize * 1000), info: { afterKey, interval: rawOptions.includeTimeseries ? 
bucketSize : undefined } }; } const aggregations = (0, _runtime_types.decodeOrThrow)(_types.AggregationResponseRT)(response.aggregations); return { series: getSeriesFromHistogram(aggregations, options, bucketSize * 1000), info: { afterKey: null, interval: bucketSize } }; } catch (e) { throw e; } }; exports.query = query; const getSeriesFromHistogram = (aggregations, options, bucketSize) => { return [(0, _convert_buckets_to_metrics_series.convertBucketsToMetricsApiSeries)(['*'], options, aggregations.histogram.buckets, bucketSize)]; }; const getSeriesFromCompositeAggregations = (groupings, options, bucketSize) => { return groupings.buckets.map(bucket => { const keys = Object.values(bucket.key); const metricsetNames = bucket.metricsets.buckets.map(m => m.key); const metrics = (0, _convert_buckets_to_metrics_series.convertBucketsToMetricsApiSeries)(keys, options, _types.HistogramBucketRT.is(bucket) ? bucket.histogram.buckets : [bucket], bucketSize); return { ...metrics, metricsets: metricsetNames }; }); };
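
/*
 * Example usage (illustrative sketch only, not part of this module). The
 * wiring of `search` and the `esClient` name below are assumptions; the only
 * option fields shown are the ones read directly by `query` above, and the
 * metric definitions consumed by createAggregations/createCompositeAggregations
 * are omitted.
 *
 *   const { query } = require('./query');
 *
 *   // `search` is assumed to forward the request to Elasticsearch and resolve
 *   // with the raw response body (hits + aggregations).
 *   const search = async (params) => {
 *     const { body } = await esClient.search(params);
 *     return body;
 *   };
 *
 *   query(search, {
 *     indexPattern: 'metrics-*',
 *     timerange: { from: Date.now() - 15 * 60 * 1000, to: Date.now() },
 *     groupBy: ['host.name'],
 *     filters: [],
 *     limit: 9,
 *     includeTimeseries: true,
 *     // ...metric definitions handled by the aggregation helpers
 *   }).then(({ series, info }) => {
 *     // A non-null `info.afterKey` indicates there may be further group-by
 *     // buckets beyond this page.
 *     console.log(info.afterKey, series.length);
 *   });
 */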