[Monitoring] Migrated legacy Elasticsearch client for 8.0 (#101850)

This commit is contained in:
Chris Cowan 2021-06-29 17:07:56 -07:00 committed by GitHub
parent fee7348806
commit 25db1df1a3
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
96 changed files with 685 additions and 526 deletions

View file

@ -6,7 +6,7 @@
*/
import { ConfigOptions } from 'elasticsearch';
import { Logger, ILegacyCustomClusterClient } from 'kibana/server';
import { Logger, ICustomClusterClient, ElasticsearchClientConfig } from 'kibana/server';
// @ts-ignore
import { monitoringBulk } from '../kibana_monitoring/lib/monitoring_bulk';
import { monitoringEndpointDisableWatches } from './monitoring_endpoint_disable_watches';
@ -25,8 +25,8 @@ export function instantiateClient(
log: Logger,
createClient: (
type: string,
clientConfig?: Partial<ESClusterConfig>
) => ILegacyCustomClusterClient
clientConfig?: Partial<ElasticsearchClientConfig> | undefined
) => ICustomClusterClient
) {
const isMonitoringCluster = hasMonitoringCluster(elasticsearchConfig);
const cluster = createClient('monitoring', {

View file

@ -6,11 +6,11 @@
*/
import { getMonitoringUsageCollector } from './get_usage_collector';
import { fetchClustersLegacy } from '../../lib/alerts/fetch_clusters';
import { fetchClusters } from '../../lib/alerts/fetch_clusters';
import { elasticsearchServiceMock } from '../../../../../../src/core/server/mocks';
jest.mock('../../lib/alerts/fetch_clusters', () => ({
fetchClustersLegacy: jest.fn().mockImplementation(() => {
fetchClusters: jest.fn().mockImplementation(() => {
return [
{
clusterUuid: '1abc',
@ -59,7 +59,8 @@ jest.mock('./lib/fetch_license_type', () => ({
}));
describe('getMonitoringUsageCollector', () => {
const esClient = elasticsearchServiceMock.createLegacyClusterClient();
const esClient = elasticsearchServiceMock.createClusterClient();
const getEsClient = () => esClient;
const config: any = {
ui: {
ccs: {
@ -72,7 +73,7 @@ describe('getMonitoringUsageCollector', () => {
const usageCollection: any = {
makeUsageCollector: jest.fn(),
};
await getMonitoringUsageCollector(usageCollection, config, esClient);
getMonitoringUsageCollector(usageCollection, config, getEsClient);
const mock = (usageCollection.makeUsageCollector as jest.Mock).mock;
@ -122,7 +123,7 @@ describe('getMonitoringUsageCollector', () => {
makeUsageCollector: jest.fn(),
};
await getMonitoringUsageCollector(usageCollection, config, esClient);
getMonitoringUsageCollector(usageCollection, config, getEsClient);
const mock = (usageCollection.makeUsageCollector as jest.Mock).mock;
const args = mock.calls[0];
@ -149,11 +150,11 @@ describe('getMonitoringUsageCollector', () => {
makeUsageCollector: jest.fn(),
};
await getMonitoringUsageCollector(usageCollection, config, esClient);
getMonitoringUsageCollector(usageCollection, config, getEsClient);
const mock = (usageCollection.makeUsageCollector as jest.Mock).mock;
const args = mock.calls[0];
(fetchClustersLegacy as jest.Mock).mockImplementation(() => {
(fetchClusters as jest.Mock).mockImplementation(() => {
return [];
});
@ -169,11 +170,11 @@ describe('getMonitoringUsageCollector', () => {
makeUsageCollector: jest.fn(),
};
await getMonitoringUsageCollector(usageCollection, config, esClient);
getMonitoringUsageCollector(usageCollection, config, getEsClient);
const mock = (usageCollection.makeUsageCollector as jest.Mock).mock;
const args = mock.calls[0];
(fetchClustersLegacy as jest.Mock).mockImplementation(() => {
(fetchClusters as jest.Mock).mockImplementation(() => {
return [];
});

View file

@ -6,20 +6,20 @@
*/
import { UsageCollectionSetup } from 'src/plugins/usage_collection/server';
import { ILegacyClusterClient } from 'src/core/server';
import { IClusterClient } from 'src/core/server';
import { MonitoringConfig } from '../../config';
import { fetchAvailableCcsLegacy } from '../../lib/alerts/fetch_available_ccs';
import { fetchAvailableCcs } from '../../lib/alerts/fetch_available_ccs';
import { getStackProductsUsage } from './lib/get_stack_products_usage';
import { fetchLicenseType } from './lib/fetch_license_type';
import { MonitoringUsage, StackProductUsage, MonitoringClusterStackProductUsage } from './types';
import { INDEX_PATTERN_ELASTICSEARCH } from '../../../common/constants';
import { getCcsIndexPattern } from '../../lib/alerts/get_ccs_index_pattern';
import { fetchClustersLegacy } from '../../lib/alerts/fetch_clusters';
import { fetchClusters } from '../../lib/alerts/fetch_clusters';
export function getMonitoringUsageCollector(
usageCollection: UsageCollectionSetup,
config: MonitoringConfig,
legacyEsClient: ILegacyClusterClient
getClient: () => IClusterClient
) {
return usageCollection.makeUsageCollector<MonitoringUsage, true>({
type: 'monitoring',
@ -103,12 +103,12 @@ export function getMonitoringUsageCollector(
},
fetch: async ({ kibanaRequest }) => {
const callCluster = kibanaRequest
? legacyEsClient.asScoped(kibanaRequest).callAsCurrentUser
: legacyEsClient.callAsInternalUser;
? getClient().asScoped(kibanaRequest).asCurrentUser
: getClient().asInternalUser;
const usageClusters: MonitoringClusterStackProductUsage[] = [];
const availableCcs = config.ui.ccs.enabled ? await fetchAvailableCcsLegacy(callCluster) : [];
const availableCcs = config.ui.ccs.enabled ? await fetchAvailableCcs(callCluster) : [];
const elasticsearchIndex = getCcsIndexPattern(INDEX_PATTERN_ELASTICSEARCH, availableCcs);
const clusters = await fetchClustersLegacy(callCluster, elasticsearchIndex);
const clusters = await fetchClusters(callCluster, elasticsearchIndex);
for (const cluster of clusters) {
const license = await fetchLicenseType(callCluster, availableCcs, cluster.clusterUuid);
const stackProducts = await getStackProductsUsage(

View file

@ -5,7 +5,7 @@
* 2.0.
*/
import { ILegacyClusterClient } from 'src/core/server';
import { IClusterClient } from 'src/core/server';
import { UsageCollectionSetup } from 'src/plugins/usage_collection/server';
import { getSettingsCollector } from './get_settings_collector';
import { getMonitoringUsageCollector } from './get_usage_collector';
@ -16,10 +16,10 @@ export { KibanaSettingsCollector, getKibanaSettings } from './get_settings_colle
export function registerCollectors(
usageCollection: UsageCollectionSetup,
config: MonitoringConfig,
legacyEsClient: ILegacyClusterClient
getClient: () => IClusterClient
) {
usageCollection.registerCollector(getSettingsCollector(usageCollection, config));
usageCollection.registerCollector(
getMonitoringUsageCollector(usageCollection, config, legacyEsClient)
getMonitoringUsageCollector(usageCollection, config, getClient)
);
}

View file

@ -5,41 +5,45 @@
* 2.0.
*/
import { ElasticsearchClient } from 'kibana/server';
import { fetchESUsage } from './fetch_es_usage';
describe('fetchESUsage', () => {
const clusterUuid = '1abcde2';
const index = '.monitoring-es-*';
const callCluster = jest.fn().mockImplementation(() => ({
hits: {
hits: [
{
_source: {
cluster_stats: {
nodes: {
count: {
total: 10,
const callCluster = ({
search: jest.fn().mockImplementation(() => ({
body: {
hits: {
hits: [
{
_source: {
cluster_stats: {
nodes: {
count: {
total: 10,
},
},
},
},
},
],
},
aggregations: {
indices: {
buckets: [
{
key: '.monitoring-es-2',
},
],
},
},
],
},
aggregations: {
indices: {
buckets: [
{
key: '.monitoring-es-2',
},
],
},
},
}));
const config: any = {};
})),
} as unknown) as ElasticsearchClient;
it('should return usage data for Elasticsearch', async () => {
const result = await fetchESUsage(config, callCluster, clusterUuid, index);
const result = await fetchESUsage(callCluster, clusterUuid, index);
expect(result).toStrictEqual({
count: 10,
enabled: true,
@ -48,33 +52,37 @@ describe('fetchESUsage', () => {
});
it('should handle some indices coming from Metricbeat', async () => {
const customCallCluster = jest.fn().mockImplementation(() => ({
hits: {
hits: [
{
_source: {
cluster_stats: {
nodes: {
count: {
total: 10,
const customCallCluster = ({
search: jest.fn().mockImplementation(() => ({
body: {
hits: {
hits: [
{
_source: {
cluster_stats: {
nodes: {
count: {
total: 10,
},
},
},
},
},
],
},
aggregations: {
indices: {
buckets: [
{
key: '.monitoring-es-mb-2',
},
],
},
},
],
},
aggregations: {
indices: {
buckets: [
{
key: '.monitoring-es-mb-2',
},
],
},
},
}));
const result = await fetchESUsage(config, customCallCluster, clusterUuid, index);
})),
} as unknown) as ElasticsearchClient;
const result = await fetchESUsage(customCallCluster, clusterUuid, index);
expect(result).toStrictEqual({
count: 10,
enabled: true,
@ -83,12 +91,16 @@ describe('fetchESUsage', () => {
});
it('should handle no monitoring data', async () => {
const customCallCluster = jest.fn().mockImplementation(() => ({
hits: {
hits: [],
},
}));
const result = await fetchESUsage(config, customCallCluster, clusterUuid, index);
const customCallCluster = ({
search: jest.fn().mockImplementation(() => ({
body: {
hits: {
hits: [],
},
},
})),
} as unknown) as ElasticsearchClient;
const result = await fetchESUsage(customCallCluster, clusterUuid, index);
expect(result).toStrictEqual({
count: 0,
enabled: false,

View file

@ -5,30 +5,15 @@
* 2.0.
*/
import { LegacyAPICaller } from 'src/core/server';
import { ElasticsearchClient } from 'src/core/server';
import { get } from 'lodash';
import { MonitoringConfig } from '../../../config';
import { estypes } from '@elastic/elasticsearch';
import { StackProductUsage } from '../types';
interface ESResponse {
hits: {
hits: ESResponseHits[];
};
aggregations: {
indices: {
buckets: ESIndicesBucket;
};
};
}
interface ESIndicesBucket {
key: string;
}
interface ESResponseHits {
_source: ClusterStats;
}
interface ClusterStats {
cluster_stats: {
nodes: {
@ -41,16 +26,15 @@ interface ClusterStats {
}
export async function fetchESUsage(
config: MonitoringConfig,
callCluster: LegacyAPICaller,
callCluster: ElasticsearchClient,
clusterUuid: string,
index: string
): Promise<StackProductUsage> {
const params = {
const params: estypes.SearchRequest = {
index,
size: 1,
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
'hits.hits._source.cluster_stats.nodes.count.total',
'aggregations.indices.buckets',
],
@ -101,8 +85,8 @@ export async function fetchESUsage(
},
};
const response = await callCluster('search', params);
const esResponse = response as ESResponse;
const { body: response } = await callCluster.search(params);
const esResponse = response as estypes.SearchResponse<ClusterStats>;
if (esResponse.hits.hits.length === 0) {
return {
count: 0,
@ -112,7 +96,7 @@ export async function fetchESUsage(
}
const hit = esResponse.hits.hits[0]._source;
const count = hit.cluster_stats.nodes.count.total;
const count = hit?.cluster_stats.nodes.count.total || 0;
const buckets = get(esResponse, 'aggregations.indices.buckets', []) as ESIndicesBucket[];
const metricbeatUsed = Boolean(buckets.find((indexBucket) => indexBucket.key.includes('-mb-')));

View file

@ -5,24 +5,29 @@
* 2.0.
*/
import { ElasticsearchClient } from 'kibana/server';
import { fetchLicenseType } from './fetch_license_type';
describe('fetchLicenseType', () => {
const clusterUuid = '1abcde2';
const availableCcs: string[] = [];
const callCluster = jest.fn().mockImplementation(() => ({
hits: {
hits: [
{
_source: {
license: {
type: 'trial',
const callCluster = ({
search: jest.fn().mockImplementation(() => ({
body: {
hits: {
hits: [
{
_source: {
license: {
type: 'trial',
},
},
},
},
],
},
],
},
}));
},
})),
} as unknown) as ElasticsearchClient;
it('should get the license type', async () => {
const result = await fetchLicenseType(callCluster, availableCcs, clusterUuid);
@ -30,11 +35,15 @@ describe('fetchLicenseType', () => {
});
it('should handle no license data', async () => {
const customCallCluster = jest.fn().mockImplementation(() => ({
hits: {
hits: [],
},
}));
const customCallCluster = ({
search: jest.fn().mockImplementation(() => ({
body: {
hits: {
hits: [],
},
},
})),
} as unknown) as ElasticsearchClient;
const result = await fetchLicenseType(customCallCluster, availableCcs, clusterUuid);
expect(result).toStrictEqual(null);
});

View file

@ -6,12 +6,13 @@
*/
import { get } from 'lodash';
import { LegacyAPICaller } from 'src/core/server';
import { ElasticsearchClient } from 'src/core/server';
import { estypes } from '@elastic/elasticsearch';
import { INDEX_PATTERN_ELASTICSEARCH } from '../../../../common/constants';
import { getCcsIndexPattern } from '../../../lib/alerts/get_ccs_index_pattern';
export async function fetchLicenseType(
callCluster: LegacyAPICaller,
client: ElasticsearchClient,
availableCcs: string[],
clusterUuid: string
) {
@ -19,9 +20,9 @@ export async function fetchLicenseType(
if (availableCcs) {
index = getCcsIndexPattern(index, availableCcs);
}
const params = {
const params: estypes.SearchRequest = {
index,
filterPath: ['hits.hits._source.license'],
filter_path: ['hits.hits._source.license'],
body: {
size: 1,
sort: [
@ -54,6 +55,6 @@ export async function fetchLicenseType(
},
},
};
const response = await callCluster('search', params);
const { body: response } = await client.search(params);
return get(response, 'hits.hits[0]._source.license.type', null);
}

View file

@ -5,6 +5,7 @@
* 2.0.
*/
import { ElasticsearchClient } from 'kibana/server';
import { fetchStackProductUsage } from './fetch_stack_product_usage';
describe('fetchStackProductUsage', () => {
@ -16,7 +17,27 @@ describe('fetchStackProductUsage', () => {
};
it('should use appropiate query parameters', async () => {
const callCluster = jest.fn();
const searchMock = jest.fn().mockImplementation(() => ({
body: {
aggregations: {
uuids: {
buckets: [
{
key: 'sadfsdf',
indices: {
buckets: [
{
key: '.monitoring-kibana-8',
},
],
},
},
],
},
},
},
}));
const callCluster = ({ search: searchMock } as unknown) as ElasticsearchClient;
await fetchStackProductUsage(
config,
callCluster,
@ -34,7 +55,7 @@ describe('fetchStackProductUsage', () => {
},
]
);
const params = callCluster.mock.calls[0][1];
const params = searchMock.mock.calls[0][0];
expect(params.body.query.bool.must[0].term.type.value).toBe('kibana_stats');
expect(params.body.query.bool.must[1].term.cluster_uuid.value).toBe(clusterUuid);
expect(params.body.query.bool.must[2].range.timestamp.gte).toBe('now-1h');
@ -42,24 +63,28 @@ describe('fetchStackProductUsage', () => {
});
it('should get the usage data', async () => {
const callCluster = jest.fn().mockImplementation(() => ({
aggregations: {
uuids: {
buckets: [
{
key: 'sadfsdf',
indices: {
buckets: [
{
key: '.monitoring-kibana-8',
const callCluster = ({
search: jest.fn().mockImplementation(() => ({
body: {
aggregations: {
uuids: {
buckets: [
{
key: 'sadfsdf',
indices: {
buckets: [
{
key: '.monitoring-kibana-8',
},
],
},
],
},
},
],
},
],
},
},
},
}));
})),
} as unknown) as ElasticsearchClient;
const result = await fetchStackProductUsage(
config,
@ -78,27 +103,31 @@ describe('fetchStackProductUsage', () => {
});
it('should handle both collection types', async () => {
const callCluster = jest.fn().mockImplementation(() => ({
aggregations: {
uuids: {
buckets: [
{
key: 'sadfsdf',
indices: {
buckets: [
{
key: '.monitoring-kibana-8',
const callCluster = ({
search: jest.fn().mockImplementation(() => ({
body: {
aggregations: {
uuids: {
buckets: [
{
key: 'sadfsdf',
indices: {
buckets: [
{
key: '.monitoring-kibana-8',
},
{
key: '.monitoring-kibana-mb-8',
},
],
},
{
key: '.monitoring-kibana-mb-8',
},
],
},
},
],
},
],
},
},
},
}));
})),
} as unknown) as ElasticsearchClient;
const result = await fetchStackProductUsage(
config,

View file

@ -6,7 +6,8 @@
*/
import { get } from 'lodash';
import { LegacyAPICaller } from 'src/core/server';
import { ElasticsearchClient } from 'src/core/server';
import { estypes } from '@elastic/elasticsearch';
import { MonitoringConfig } from '../../../config';
// @ts-ignore
import { prefixIndexPattern } from '../../../lib/ccs_utils';
@ -33,7 +34,7 @@ interface KeyBucket {
export async function fetchStackProductUsage(
config: MonitoringConfig,
callCluster: LegacyAPICaller,
callCluster: ElasticsearchClient,
clusterUuid: string,
index: string,
type: string,
@ -41,11 +42,11 @@ export async function fetchStackProductUsage(
filters: any[] = []
): Promise<StackProductUsage> {
const size = config.ui.max_bucket_size;
const params = {
const params: estypes.SearchRequest = {
index,
size: 0,
ignoreUnavailable: true,
filterPath: ['aggregations.uuids.buckets'],
ignore_unavailable: true,
filter_path: ['aggregations.uuids.buckets'],
body: {
query: {
bool: {
@ -94,7 +95,8 @@ export async function fetchStackProductUsage(
},
};
const response = (await callCluster('search', params)) as ESResponse;
const { body: responseBody } = await callCluster.search(params);
const response = responseBody as estypes.SearchResponse<ESResponse>;
const uuidBuckets = get(response, 'aggregations.uuids.buckets', []) as UuidBucket[];
const count = uuidBuckets.length;
const metricbeatUsed = Boolean(

View file

@ -5,6 +5,7 @@
* 2.0.
*/
import { ElasticsearchClient } from 'kibana/server';
import { getStackProductsUsage } from './get_stack_products_usage';
describe('getStackProductsUsage', () => {
@ -15,11 +16,15 @@ describe('getStackProductsUsage', () => {
};
const clusterUuid = '1abcde2';
const availableCcs: string[] = [];
const callCluster = jest.fn().mockImplementation(() => ({
hits: {
hits: [],
},
}));
const callCluster = ({
search: jest.fn().mockImplementation(() => ({
body: {
hits: {
hits: [],
},
},
})),
} as unknown) as ElasticsearchClient;
it('should get all stack products', async () => {
const result = await getStackProductsUsage(config, callCluster, availableCcs, clusterUuid);

View file

@ -5,7 +5,7 @@
* 2.0.
*/
import { LegacyAPICaller } from 'src/core/server';
import { ElasticsearchClient } from 'src/core/server';
import { MonitoringClusterStackProductUsage } from '../types';
import { fetchESUsage } from './fetch_es_usage';
import { MonitoringConfig } from '../../../config';
@ -24,7 +24,7 @@ import { getCcsIndexPattern } from '../../../lib/alerts/get_ccs_index_pattern';
export const getStackProductsUsage = async (
config: MonitoringConfig,
callCluster: LegacyAPICaller,
callCluster: ElasticsearchClient,
availableCcs: string[],
clusterUuid: string
): Promise<
@ -38,7 +38,7 @@ export const getStackProductsUsage = async (
const logstashIndex = getCcsIndexPattern(INDEX_PATTERN_LOGSTASH, availableCcs);
const beatsIndex = getCcsIndexPattern(INDEX_PATTERN_BEATS, availableCcs);
const [elasticsearch, kibana, logstash, beats, apm] = await Promise.all([
fetchESUsage(config, callCluster, clusterUuid, elasticsearchIndex),
fetchESUsage(callCluster, clusterUuid, elasticsearchIndex),
fetchStackProductUsage(
config,
callCluster,

View file

@ -6,7 +6,7 @@
*/
import { Logger } from 'kibana/server';
import { LegacyAPICaller } from 'src/core/server';
import { ElasticsearchClient } from 'src/core/server';
interface DisableWatchesResponse {
exporters: Array<
@ -22,9 +22,13 @@ interface DisableWatchesResponse {
>;
}
async function callMigrationApi(callCluster: LegacyAPICaller, logger: Logger) {
async function callMigrationApi(callCluster: ElasticsearchClient, logger: Logger) {
try {
return await callCluster('monitoring.disableWatches');
const { body: response } = await callCluster.transport.request({
method: 'post',
path: '/monitoring.disableWatches',
});
return response as DisableWatchesResponse;
} catch (err) {
logger.warn(
`Unable to call migration api to disable cluster alert watches. Message=${err.message}`
@ -33,8 +37,11 @@ async function callMigrationApi(callCluster: LegacyAPICaller, logger: Logger) {
}
}
export async function disableWatcherClusterAlerts(callCluster: LegacyAPICaller, logger: Logger) {
const response: DisableWatchesResponse = await callMigrationApi(callCluster, logger);
export async function disableWatcherClusterAlerts(
callCluster: ElasticsearchClient,
logger: Logger
) {
const response: DisableWatchesResponse | undefined = await callMigrationApi(callCluster, logger);
if (!response || response.exporters.length === 0) {
return true;
}

View file

@ -18,7 +18,7 @@ export async function fetchCCRReadExceptions(
): Promise<CCRReadExceptionsStats[]> {
const params = {
index,
filterPath: ['aggregations.remote_clusters.buckets'],
filter_path: ['aggregations.remote_clusters.buckets'],
body: {
size: 0,
query: {

View file

@ -15,7 +15,7 @@ export async function fetchClusterHealth(
): Promise<AlertClusterHealth[]> {
const params = {
index,
filterPath: [
filter_path: [
'hits.hits._source.cluster_state.status',
'hits.hits._source.cluster_uuid',
'hits.hits._index',

View file

@ -23,7 +23,7 @@ export async function fetchClusters(
): Promise<AlertCluster[]> {
const params = {
index,
filterPath: [
filter_path: [
'hits.hits._source.cluster_settings.cluster.metadata.display_name',
'hits.hits._source.cluster_uuid',
'hits.hits._source.cluster_name',
@ -70,7 +70,7 @@ export async function fetchClustersLegacy(
): Promise<AlertCluster[]> {
const params = {
index,
filterPath: [
filter_path: [
'hits.hits._source.cluster_settings.cluster.metadata.display_name',
'hits.hits._source.cluster_uuid',
'hits.hits._source.cluster_name',

View file

@ -204,7 +204,7 @@ describe('fetchCpuUsageNodeStats', () => {
await fetchCpuUsageNodeStats(esClient, clusters, index, startMs, endMs, size);
expect(params).toStrictEqual({
index: '.monitoring-es-*',
filterPath: ['aggregations'],
filter_path: ['aggregations'],
body: {
size: 0,
query: {

View file

@ -34,10 +34,9 @@ export async function fetchCpuUsageNodeStats(
// Using pure MS didn't seem to work well with the date_histogram interval
// but minutes does
const intervalInMinutes = moment.duration(endMs - startMs).asMinutes();
const filterPath = ['aggregations'];
const params = {
index,
filterPath,
filter_path: ['aggregations'],
body: {
size: 0,
query: {

View file

@ -19,7 +19,7 @@ export async function fetchDiskUsageNodeStats(
const clustersIds = clusters.map((cluster) => cluster.clusterUuid);
const params = {
index,
filterPath: ['aggregations'],
filter_path: ['aggregations'],
body: {
size: 0,
query: {

View file

@ -16,7 +16,7 @@ export async function fetchElasticsearchVersions(
): Promise<AlertVersions[]> {
const params = {
index,
filterPath: [
filter_path: [
'hits.hits._source.cluster_stats.nodes.versions',
'hits.hits._index',
'hits.hits._source.cluster_uuid',

View file

@ -39,7 +39,7 @@ export async function fetchIndexShardSize(
): Promise<IndexShardSizeStats[]> {
const params = {
index,
filterPath: ['aggregations.clusters.buckets'],
filter_path: ['aggregations.clusters.buckets'],
body: {
size: 0,
query: {

View file

@ -20,7 +20,7 @@ export async function fetchKibanaVersions(
): Promise<AlertVersions[]> {
const params = {
index,
filterPath: ['aggregations'],
filter_path: ['aggregations'],
body: {
size: 0,
query: {

View file

@ -15,7 +15,7 @@ export async function fetchLicenses(
): Promise<AlertLicense[]> {
const params = {
index,
filterPath: [
filter_path: [
'hits.hits._source.license.*',
'hits.hits._source.cluster_uuid',
'hits.hits._index',

View file

@ -20,7 +20,7 @@ export async function fetchLogstashVersions(
): Promise<AlertVersions[]> {
const params = {
index,
filterPath: ['aggregations'],
filter_path: ['aggregations'],
body: {
size: 0,
query: {

View file

@ -20,7 +20,7 @@ export async function fetchMemoryUsageNodeStats(
const clustersIds = clusters.map((cluster) => cluster.clusterUuid);
const params = {
index,
filterPath: ['aggregations'],
filter_path: ['aggregations'],
body: {
size: 0,
query: {

View file

@ -52,7 +52,7 @@ export async function fetchMissingMonitoringData(
const endMs = nowInMs;
const params = {
index,
filterPath: ['aggregations.clusters.buckets'],
filter_path: ['aggregations.clusters.buckets'],
body: {
size: 0,
query: {

View file

@ -30,7 +30,7 @@ export async function fetchNodesFromClusterStats(
): Promise<AlertClusterStatsNodes[]> {
const params = {
index,
filterPath: ['aggregations.clusters.buckets'],
filter_path: ['aggregations.clusters.buckets'],
body: {
size: 0,
sort: [

View file

@ -41,7 +41,7 @@ export async function fetchThreadPoolRejectionStats(
const clustersIds = clusters.map((cluster) => cluster.clusterUuid);
const params = {
index,
filterPath: ['aggregations'],
filter_path: ['aggregations'],
body: {
size: 0,
query: {

View file

@ -30,7 +30,7 @@ export async function getTimeOfLastEvent({
const params = {
index: apmIndexPattern,
size: 1,
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
_source: ['beats_stats.timestamp', '@timestamp'],
sort: [

View file

@ -97,8 +97,8 @@ export async function getApmInfo(
const params = {
index: apmIndexPattern,
size: 1,
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
'hits.hits._source.beats_stats.beat.host',
'hits.hits._source.beats_stats.beat.version',
'hits.hits._source.beats_stats.beat.name',

View file

@ -109,8 +109,8 @@ export async function getApms(req: LegacyRequest, apmIndexPattern: string, clust
const params = {
index: apmIndexPattern,
size: config.get('monitoring.ui.max_bucket_size'), // FIXME
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
// only filter path can filter for inner_hits
'hits.hits._source.timestamp',
'hits.hits._source.@timestamp',

View file

@ -46,8 +46,8 @@ export function getApmsForClusters(req, apmIndexPattern, clusters) {
const params = {
index: apmIndexPattern,
size: 0,
ignoreUnavailable: true,
filterPath: apmAggFilterPath,
ignore_unavailable: true,
filter_path: apmAggFilterPath,
body: {
query: createApmQuery({
start,

View file

@ -34,9 +34,9 @@ export async function getStats(req, apmIndexPattern, clusterUuid) {
const params = {
index: apmIndexPattern,
filterPath: apmAggFilterPath,
filter_path: apmAggFilterPath,
size: 0,
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
query: createApmQuery({
start,

View file

@ -89,8 +89,8 @@ export async function getBeatSummary(
const params = {
index: beatsIndexPattern,
size: 1,
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
'hits.hits._source.beats_stats.beat.host',
'hits.hits._source.beat.stats.beat.host',
'hits.hits._source.beats_stats.beat.version',

View file

@ -121,8 +121,8 @@ export async function getBeats(req: LegacyRequest, beatsIndexPattern: string, cl
const params = {
index: beatsIndexPattern,
size: config.get('monitoring.ui.max_bucket_size'), // FIXME
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
// only filter path can filter for inner_hits
'hits.hits._source.beats_stats.beat.uuid',
'hits.hits._source.beat.stats.beat.uuid',

View file

@ -44,8 +44,8 @@ export function getBeatsForClusters(req, beatsIndexPattern, clusters) {
const params = {
index: beatsIndexPattern,
size: 0,
ignoreUnavailable: true,
filterPath: beatsAggFilterPath,
ignore_unavailable: true,
filter_path: beatsAggFilterPath,
body: {
query: createBeatsQuery({
start,

View file

@ -81,8 +81,8 @@ export function getLatestStats(req, beatsIndexPattern, clusterUuid) {
const params = {
index: beatsIndexPattern,
size: 0,
ignoreUnavailable: true,
filterPath: 'aggregations',
ignore_unavailable: true,
filter_path: 'aggregations',
body: {
query: createBeatsQuery({
clusterUuid,

View file

@ -33,9 +33,9 @@ export async function getStats(req, beatsIndexPattern, clusterUuid) {
const params = {
index: beatsIndexPattern,
filterPath: beatsAggFilterPath,
filter_path: beatsAggFilterPath,
size: 0,
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
query: createBeatsQuery({
start,

View file

@ -30,8 +30,8 @@ async function findSupportedBasicLicenseCluster(
const kibanaDataResult: ElasticsearchResponse = (await callWithRequest(req, 'search', {
index: kbnIndexPattern,
size: 1,
ignoreUnavailable: true,
filterPath: ['hits.hits._source.cluster_uuid', 'hits.hits._source.cluster.id'],
ignore_unavailable: true,
filter_path: ['hits.hits._source.cluster_uuid', 'hits.hits._source.cluster.id'],
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: {

View file

@ -20,8 +20,8 @@ export function getClusterLicense(req: LegacyRequest, esIndexPattern: string, cl
const params = {
index: esIndexPattern,
size: 1,
ignoreUnavailable: true,
filterPath: 'hits.hits._source.license',
ignore_unavailable: true,
filter_path: ['hits.hits._source.license'],
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: createQuery({

View file

@ -66,8 +66,8 @@ export function getClustersState(
const params = {
index: esIndexPattern,
size: clusterUuids.length,
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
'hits.hits._source.cluster_uuid',
'hits.hits._source.elasticsearch.cluster.id',
'hits.hits._source.cluster_state',

View file

@ -53,8 +53,8 @@ function fetchClusterStats(req: LegacyRequest, esIndexPattern: string, clusterUu
const params = {
index: esIndexPattern,
size: config.get('monitoring.ui.max_bucket_size'),
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
'hits.hits._index',
'hits.hits._source.cluster_uuid',
'hits.hits._source.elasticsearch.cluster.id',

View file

@ -150,7 +150,7 @@ async function fetchSeries(
const params = {
index: indexPattern,
size: 0,
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
query: createQuery({
start: adjustedMin,

View file

@ -27,7 +27,7 @@ export async function checkCcrEnabled(req: LegacyRequest, esIndexPattern: string
const params = {
index: esIndexPattern,
size: 1,
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
query: createQuery({
type: 'cluster_stats',
@ -38,7 +38,7 @@ export async function checkCcrEnabled(req: LegacyRequest, esIndexPattern: string
}),
sort: [{ timestamp: { order: 'desc', unmapped_type: 'long' } }],
},
filterPath: [
filter_path: [
'hits.hits._source.stack_stats.xpack.ccr',
'hits.hits._source.elasticsearch.cluster.stats.stack.xpack.ccr',
],

View file

@ -97,7 +97,7 @@ export async function getLastRecovery(req: LegacyRequest, esIndexPattern: string
const legacyParams = {
index: esIndexPattern,
size: 1,
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
_source: ['index_recovery.shards'],
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },

View file

@ -47,8 +47,8 @@ export function getMlJobs(req: LegacyRequest, esIndexPattern: string) {
const params = {
index: esIndexPattern,
size: maxBucketSize,
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
'hits.hits._source.job_stats.job_id',
'hits.hits._source.elasticsearch.ml.job.id',
'hits.hits._source.job_stats.state',
@ -95,8 +95,8 @@ export function getMlJobsForCluster(
const params = {
index: esIndexPattern,
size: 0,
ignoreUnavailable: true,
filterPath: 'aggregations.jobs_count.value',
ignore_unavailable: true,
filter_path: 'aggregations.jobs_count.value',
body: {
query: createQuery({ types: ['ml_job', 'job_stats'], start, end, clusterUuid, metric }),
aggs: {

View file

@ -89,7 +89,7 @@ export function getIndexSummary(
const params = {
index: esIndexPattern,
size: 1,
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: createQuery({

View file

@ -116,8 +116,8 @@ export function buildGetIndicesQuery(
return {
index: esIndexPattern,
size,
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
// only filter path can filter for inner_hits
'hits.hits._source.index_stats.index',
'hits.hits._source.elasticsearch.index.name',

View file

@ -124,7 +124,7 @@ export function getNodeSummary(
const params = {
index: esIndexPattern,
size: 1,
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: createQuery({ type: 'node_stats', start, end, clusterUuid, metric, filters }),

View file

@ -17,8 +17,8 @@ export async function getNodeIds(req, indexPattern, { clusterUuid }, size) {
const params = {
index: indexPattern,
size: 0,
ignoreUnavailable: true,
filterPath: ['aggregations.composite_data.buckets'],
ignore_unavailable: true,
filter_path: ['aggregations.composite_data.buckets'],
body: {
query: createQuery({
type: 'node_stats',

View file

@ -76,7 +76,7 @@ export async function getNodes(
const params = {
index: esIndexPattern,
size: config.get('monitoring.ui.max_bucket_size'),
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
query: createQuery({
type: 'node_stats',
@ -110,7 +110,7 @@ export async function getNodes(
},
sort: [{ timestamp: { order: 'desc', unmapped_type: 'long' } }],
},
filterPath: [
filter_path: [
'hits.hits._source.source_node',
'hits.hits._source.service.address',
'hits.hits._source.elasticsearch.node',

View file

@ -41,7 +41,7 @@ async function getUnassignedShardData(
const params = {
index: esIndexPattern,
size: 0,
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: createQuery({

View file

@ -39,7 +39,7 @@ async function getShardCountPerNode(
const params = {
index: esIndexPattern,
size: 0,
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: createQuery({

View file

@ -103,7 +103,7 @@ export function getShardAllocation(
const params = {
index: esIndexPattern,
size: config.get('monitoring.ui.max_bucket_size'),
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
query: createQuery({ types: ['shard', 'shards'], clusterUuid, metric, filters }),
},

View file

@ -96,7 +96,7 @@ export function getShardStats(
const params = {
index: esIndexPattern,
size: 0,
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query: createQuery({

View file

@ -53,7 +53,7 @@ async function verifyHasPrivileges(req) {
},
],
},
ignoreUnavailable: true, // we allow 404 incase the user shutdown security in-between the check and now
ignore_unavailable: true, // we allow 404 in case the user shut down security between the check and now
});
} catch (err) {
if (

View file

@ -34,15 +34,6 @@ export async function checkClusterSettings(req) {
const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('admin');
const { cloud } = req.server.newPlatform.setup.plugins;
const isCloudEnabled = !!(cloud && cloud.isCloudEnabled);
const response = await callWithRequest(req, 'transport.request', {
method: 'GET',
path: '/_cluster/settings?include_defaults',
filter_path: [
'persistent.xpack.monitoring',
'transient.xpack.monitoring',
'defaults.xpack.monitoring',
],
});
const response = await callWithRequest(req, 'cluster.getSettings', { include_defaults: true });
return handleResponse(response, isCloudEnabled);
}

View file

@ -36,8 +36,8 @@ export function getKibanaInfo(
const params = {
index: kbnIndexPattern,
size: 1,
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
'hits.hits._source.kibana_stats.kibana',
'hits.hits._source.kibana.kibana',
'hits.hits._source.kibana_stats.os.memory.free_in_bytes',

View file

@ -71,7 +71,7 @@ export async function getKibanas(
const params = {
index: kbnIndexPattern,
size: config.get('monitoring.ui.max_bucket_size'),
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
query: createQuery({
types: ['kibana_stats', 'stats'],

View file

@ -37,7 +37,7 @@ export function getKibanasForClusters(req, kbnIndexPattern, clusters) {
const params = {
index: kbnIndexPattern,
size: 0,
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
query: createQuery({
types: ['stats', 'kibana_stats'],

View file

@ -84,8 +84,8 @@ export async function getLogTypes(
const params = {
index: filebeatIndexPattern,
size: 0,
filterPath: ['aggregations.levels.buckets', 'aggregations.types.buckets'],
ignoreUnavailable: true,
filter_path: ['aggregations.levels.buckets', 'aggregations.types.buckets'],
ignore_unavailable: true,
body: {
sort: { '@timestamp': { order: 'desc', unmapped_type: 'long' } },
query: {

View file

@ -100,7 +100,7 @@ export async function getLogs(
const params = {
index: filebeatIndexPattern,
size: Math.min(50, config.get('monitoring.ui.elasticsearch.logFetchCount')),
filterPath: [
filter_path: [
'hits.hits._source.message',
'hits.hits._source.log.level',
'hits.hits._source.@timestamp',
@ -109,7 +109,7 @@ export async function getLogs(
'hits.hits._source.elasticsearch.index.name',
'hits.hits._source.elasticsearch.node.name',
],
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
sort: { '@timestamp': { order: 'desc', unmapped_type: 'long' } },
query: {

View file

@ -48,7 +48,7 @@ export function getLogstashForClusters(req, lsIndexPattern, clusters) {
const params = {
index: lsIndexPattern,
size: 0,
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
query: createQuery({
types: ['stats', 'logstash_stats'],

View file

@ -45,8 +45,8 @@ export function getNodeInfo(
const params = {
index: lsIndexPattern,
size: 1,
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
'hits.hits._source.logstash_stats.events',
'hits.hits._source.logstash.node.stats.events',
'hits.hits._source.logstash_stats.jvm.uptime_in_millis',

View file

@ -82,7 +82,7 @@ export async function getNodes(
const params = {
index: lsIndexPattern,
size: config.get('monitoring.ui.max_bucket_size'), // FIXME
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
query: createQuery({
start,

View file

@ -27,8 +27,8 @@ export async function getLogstashPipelineIds(
const params = {
index: logstashIndexPattern,
size: 0,
ignoreUnavailable: true,
filterPath: ['aggregations.nest.id.buckets', 'aggregations.nest_mb.id.buckets'],
ignore_unavailable: true,
filter_path: ['aggregations.nest.id.buckets', 'aggregations.nest_mb.id.buckets'],
body: {
query: createQuery({
start,

View file

@ -42,7 +42,7 @@ export async function getPipelineStateDocument(
const params = {
index: logstashIndexPattern,
size: 1,
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
_source: { excludes: 'logstash_state.pipeline.representation.plugins' },
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },

View file

@ -106,8 +106,8 @@ function fetchPipelineLatestStats(
const params = {
index: logstashIndexPattern,
size: 0,
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
'aggregations.pipelines.scoped.vertices.vertex_id.buckets.key',
'aggregations.pipelines.scoped.vertices.vertex_id.buckets.events_in_total',
'aggregations.pipelines.scoped.vertices.vertex_id.buckets.events_out_total',

View file

@ -82,7 +82,7 @@ function fetchPipelineVersions(...args) {
const params = {
index: logstashIndexPattern,
size: 0,
ignoreUnavailable: true,
ignore_unavailable: true,
body: {
sort: { timestamp: { order: 'desc', unmapped_type: 'long' } },
query,

View file

@ -155,8 +155,8 @@ function fetchPipelineVertexTimeSeriesStats(
const params = {
index: logstashIndexPattern,
size: 0,
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
'aggregations.timeseries.buckets.key',
'aggregations.timeseries.buckets.pipelines.scoped.vertices.vertex_id.events_in_total',
'aggregations.timeseries.buckets.pipelines.scoped.vertices.vertex_id.events_out_total',

View file

@ -57,8 +57,8 @@ const getRecentMonitoringDocuments = async (req, indexPatterns, clusterUuid, nod
const params = {
index: Object.values(indexPatterns),
size: 0,
ignoreUnavailable: true,
filterPath: ['aggregations.indices.buckets'],
ignore_unavailable: true,
filter_path: ['aggregations.indices.buckets'],
body: {
query: {
bool: {
@ -206,8 +206,8 @@ async function doesIndexExist(req, index) {
index,
size: 0,
terminate_after: 1,
ignoreUnavailable: true,
filterPath: ['hits.total.value'],
ignore_unavailable: true,
filter_path: ['hits.total.value'],
};
const { callWithRequest } = req.server.plugins.elasticsearch.getCluster('monitoring');
const response = await callWithRequest(req, 'search', params);

View file

@ -6,16 +6,17 @@
*/
import { Subscription } from 'rxjs';
import { ILegacyCustomClusterClient } from 'kibana/server';
import { IClusterClient, ILegacyClusterClient } from 'kibana/server';
import { ILicense, LicenseFeature } from '../../licensing/common/types';
import { LicensingPluginStart } from '../../licensing/server';
import { MonitoringConfig } from './config';
import { Logger } from '../../../../src/core/server';
import { MonitoringLicenseService } from './types';
import { EndpointTypes, Globals, ClientParams } from './static_globals';
interface SetupDeps {
licensing: LicensingPluginStart;
monitoringClient: ILegacyCustomClusterClient;
monitoringClient: IClusterClient;
config: MonitoringConfig;
log: Logger;
}
@ -27,8 +28,15 @@ const defaultLicenseFeature: LicenseFeature = {
export class LicenseService {
public setup({ licensing, monitoringClient, config, log }: SetupDeps): MonitoringLicenseService {
// TODO: This needs to be changed to an IClusterClient when the Licensing server
// is upgraded to the new client.
const fakeLegacyClusterClient = {
callAsInternalUser: (endpoint: EndpointTypes, options: ClientParams) =>
Globals.app.getLegacyClusterShim(monitoringClient.asInternalUser, endpoint, options),
} as ILegacyClusterClient;
const { refresh, license$ } = licensing.createLicensePoller(
monitoringClient,
fakeLegacyClusterClient,
config.licensing.api_polling_frequency.asMilliseconds()
);

View file

@ -15,11 +15,12 @@ import {
KibanaRequest,
KibanaResponseFactory,
CoreSetup,
ILegacyCustomClusterClient,
ICustomClusterClient,
CoreStart,
CustomHttpResponseOptions,
ResponseError,
Plugin,
SharedGlobalConfig,
} from 'kibana/server';
import { DEFAULT_APP_CATEGORIES } from '../../../../src/core/server';
import {
@ -33,8 +34,6 @@ import { MonitoringConfig, createConfig, configSchema } from './config';
import { requireUIRoutes } from './routes';
import { initBulkUploader } from './kibana_monitoring';
import { initInfraSource } from './lib/logs/init_infra_source';
import { mbSafeQuery } from './lib/mb_safe_query';
import { instantiateClient } from './es_client/instantiate_client';
import { registerCollectors } from './kibana_monitoring/collectors';
import { registerMonitoringTelemetryCollection } from './telemetry_collection';
import { LicenseService } from './license_service';
@ -51,7 +50,8 @@ import {
RequestHandlerContextMonitoringPlugin,
} from './types';
import { Globals } from './static_globals';
import { Globals, EndpointTypes } from './static_globals';
import { instantiateClient } from './es_client/instantiate_client';
// Used to detect "-snapshot" suffixes in the Kibana version string (snapshot builds)
const snapshotRegex = /-snapshot/i;
@ -71,50 +71,68 @@ export class MonitoringPlugin
private readonly initializerContext: PluginInitializerContext;
private readonly log: Logger;
private readonly getLogger: (...scopes: string[]) => Logger;
private cluster = {} as ILegacyCustomClusterClient;
private cluster = {} as ICustomClusterClient;
private licenseService = {} as MonitoringLicenseService;
private monitoringCore = {} as MonitoringCore;
private legacyShimDependencies = {} as LegacyShimDependencies;
private bulkUploader: IBulkUploader | undefined;
private bulkUploader?: IBulkUploader;
private readonly config: MonitoringConfig;
private readonly legacyConfig: SharedGlobalConfig;
private coreSetup?: CoreSetup;
private setupPlugins?: PluginsSetup;
constructor(initializerContext: PluginInitializerContext) {
this.initializerContext = initializerContext;
this.log = initializerContext.logger.get(LOGGING_TAG);
this.getLogger = (...scopes: string[]) => initializerContext.logger.get(LOGGING_TAG, ...scopes);
this.config = createConfig(this.initializerContext.config.get<TypeOf<typeof configSchema>>());
this.legacyConfig = this.initializerContext.config.legacy.get();
}
setup(core: CoreSetup, plugins: PluginsSetup) {
const config = createConfig(this.initializerContext.config.get<TypeOf<typeof configSchema>>());
const legacyConfig = this.initializerContext.config.legacy.get();
setup(coreSetup: CoreSetup, plugins: PluginsSetup) {
this.coreSetup = coreSetup;
this.setupPlugins = plugins;
const router = core.http.createRouter<RequestHandlerContextMonitoringPlugin>();
this.legacyShimDependencies = {
router,
instanceUuid: this.initializerContext.env.instanceUuid,
esDataClient: core.elasticsearch.legacy.client,
kibanaStatsCollector: plugins.usageCollection?.getCollectorByType(
KIBANA_STATS_TYPE_MONITORING
),
};
const serverInfo = coreSetup.http.getServerInfo();
const kibanaMonitoringLog = this.getLogger(KIBANA_MONITORING_LOGGING_TAG);
this.bulkUploader = initBulkUploader({
config: this.config,
log: kibanaMonitoringLog,
opsMetrics$: coreSetup.metrics.getOpsMetrics$(),
statusGetter$: coreSetup.status.overall$,
kibanaStats: {
uuid: this.initializerContext.env.instanceUuid,
name: serverInfo.name,
index: this.legacyConfig.kibana.index,
host: serverInfo.hostname,
locale: i18n.getLocale(),
port: serverInfo.port.toString(),
transport_address: `${serverInfo.hostname}:${serverInfo.port}`,
version: this.initializerContext.env.packageInfo.version,
snapshot: snapshotRegex.test(this.initializerContext.env.packageInfo.version),
},
});
// Monitoring creates and maintains a connection to a potentially
// separate ES cluster - create this first
const cluster = (this.cluster = instantiateClient(
config.ui.elasticsearch,
this.log,
core.elasticsearch.legacy.createClient
));
Globals.init({
initializerContext: this.initializerContext,
config: this.config!,
getLogger: this.getLogger,
log: this.log,
legacyConfig: this.legacyConfig,
coreSetup: this.coreSetup!,
setupPlugins: this.setupPlugins!,
});
Globals.init(core, plugins.cloud, cluster, config, this.getLogger);
const serverInfo = core.http.getServerInfo();
const alerts = AlertsFactory.getAll();
for (const alert of alerts) {
plugins.alerting?.registerType(alert.getAlertType());
}
const config = createConfig(this.initializerContext.config.get<TypeOf<typeof configSchema>>());
// Register collector objects for stats to show up in the APIs
if (plugins.usageCollection) {
core.savedObjects.registerType({
coreSetup.savedObjects.registerType({
name: SAVED_OBJECT_TELEMETRY,
hidden: true,
namespaceType: 'agnostic',
@ -127,33 +145,40 @@ export class MonitoringPlugin
},
});
registerCollectors(plugins.usageCollection, config, cluster);
registerCollectors(plugins.usageCollection, config, () => this.cluster);
registerMonitoringTelemetryCollection(
plugins.usageCollection,
cluster,
() => this.cluster,
config.ui.max_bucket_size
);
}
if (config.ui.enabled) {
this.registerPluginInUI(plugins);
}
// Always create the bulk uploader
const kibanaMonitoringLog = this.getLogger(KIBANA_MONITORING_LOGGING_TAG);
const bulkUploader = (this.bulkUploader = initBulkUploader({
config,
log: kibanaMonitoringLog,
opsMetrics$: core.metrics.getOpsMetrics$(),
statusGetter$: core.status.overall$,
kibanaStats: {
uuid: this.initializerContext.env.instanceUuid,
name: serverInfo.name,
index: get(legacyConfig, 'kibana.index'),
host: serverInfo.hostname,
locale: i18n.getLocale(),
port: serverInfo.port.toString(),
transport_address: `${serverInfo.hostname}:${serverInfo.port}`,
version: this.initializerContext.env.packageInfo.version,
snapshot: snapshotRegex.test(this.initializerContext.env.packageInfo.version),
},
}));
return {
// OSS stats api needs to call this in order to centralize how
// we fetch kibana specific stats
getKibanaStats: () => this.bulkUploader?.getKibanaStats() || {},
};
}
init(cluster: ICustomClusterClient, coreStart: CoreStart) {
const config = createConfig(this.initializerContext.config.get<TypeOf<typeof configSchema>>());
const legacyConfig = this.initializerContext.config.legacy.get();
const coreSetup = this.coreSetup!;
const plugins = this.setupPlugins!;
const router = coreSetup.http.createRouter<RequestHandlerContextMonitoringPlugin>();
// const [{ elasticsearch }] = await core.getStartServices();
this.legacyShimDependencies = {
router,
instanceUuid: this.initializerContext.env.instanceUuid,
esDataClient: coreStart.elasticsearch.client.asInternalUser,
kibanaStatsCollector: plugins.usageCollection?.getCollectorByType(
KIBANA_STATS_TYPE_MONITORING
),
};
// If the UI is enabled, then we want to register it so it shows up
// and start any other UI-related setup tasks
@ -162,12 +187,11 @@ export class MonitoringPlugin
this.monitoringCore = this.getLegacyShim(
config,
legacyConfig,
core.getStartServices as () => Promise<[CoreStart, PluginsStart, {}]>,
this.cluster,
coreSetup.getStartServices as () => Promise<[CoreStart, PluginsStart, {}]>,
cluster,
plugins
);
this.registerPluginInUI(plugins);
requireUIRoutes(this.monitoringCore, {
cluster,
router,
@ -177,16 +201,18 @@ export class MonitoringPlugin
});
initInfraSource(config, plugins.infra);
}
return {
// OSS stats api needs to call this in order to centralize how
// we fetch kibana specific stats
getKibanaStats: () => bulkUploader.getKibanaStats(),
};
}
async start(core: CoreStart, { licensing }: PluginsStart) {
const config = createConfig(this.initializerContext.config.get<TypeOf<typeof configSchema>>());
async start(coreStart: CoreStart, { licensing }: PluginsStart) {
const config = this.config!;
this.cluster = instantiateClient(
config.ui.elasticsearch,
this.log,
coreStart.elasticsearch.createClient
);
this.init(this.cluster, coreStart);
// Start our license service which will ensure
// the appropriate licenses are present
this.licenseService = new LicenseService().setup({
@ -209,7 +235,7 @@ export class MonitoringPlugin
const monitoringBulkEnabled =
mainMonitoring && mainMonitoring.isAvailable && mainMonitoring.isEnabled;
if (monitoringBulkEnabled) {
this.bulkUploader?.start(core.elasticsearch.client.asInternalUser);
this.bulkUploader?.start(coreStart.elasticsearch.client.asInternalUser);
} else {
this.bulkUploader?.handleNotEnabled();
}
@ -227,7 +253,7 @@ export class MonitoringPlugin
}
stop() {
if (this.cluster) {
if (this.cluster && this.cluster.close) {
this.cluster.close();
}
if (this.licenseService && this.licenseService.stop) {
@ -281,7 +307,7 @@ export class MonitoringPlugin
config: MonitoringConfig,
legacyConfig: any,
getCoreServices: () => Promise<[CoreStart, PluginsStart, {}]>,
cluster: ILegacyCustomClusterClient,
cluster: ICustomClusterClient,
setupPlugins: PluginsSetup
): MonitoringCore {
const router = this.legacyShimDependencies.router;
@ -354,12 +380,12 @@ export class MonitoringPlugin
},
elasticsearch: {
getCluster: (name: string) => ({
callWithRequest: async (_req: any, endpoint: string, params: any) => {
callWithRequest: async (_req: any, endpoint: EndpointTypes, params: any) => {
const client =
name === 'monitoring' ? cluster : this.legacyShimDependencies.esDataClient;
return mbSafeQuery(() =>
client.asScoped(req).callAsCurrentUser(endpoint, params)
);
name === 'monitoring'
? cluster.asScoped(req).asCurrentUser
: context.core.elasticsearch.client.asCurrentUser;
return await Globals.app.getLegacyClusterShim(client, endpoint, params);
},
}),
},

View file

@ -93,7 +93,7 @@ export function enableAlertsRoute(server: LegacyServer, npRoute: RouteDependenci
let createdAlerts: Array<SanitizedAlert<AlertTypeParams>> = [];
const disabledWatcherClusterAlerts = await disableWatcherClusterAlerts(
npRoute.cluster.asScoped(request).callAsCurrentUser,
npRoute.cluster.asScoped(request).asCurrentUser,
npRoute.logger
);

View file

@ -99,7 +99,7 @@ function buildRequest(
return {
index: esIndexPattern,
size: maxBucketSize,
filterPath: [
filter_path: [
'hits.hits.inner_hits.by_shard.hits.hits._source.ccr_stats.read_exceptions',
'hits.hits.inner_hits.by_shard.hits.hits._source.elasticsearch.ccr.read_exceptions',
'hits.hits.inner_hits.by_shard.hits.hits._source.ccr_stats.follower_index',

View file

@ -35,7 +35,7 @@ async function getCcrStat(req: LegacyRequest, esIndexPattern: string, filters: u
const params = {
index: esIndexPattern,
size: 1,
filterPath: [
filter_path: [
'hits.hits._source.ccr_stats',
'hits.hits._source.elasticsearch.ccr',
'hits.hits._source.timestamp',

View file

@ -23,6 +23,7 @@ export function clusterSettingsCheckRoute(server) {
const response = await checkClusterSettings(req); // needs to be try/catch to handle privilege error
return response;
} catch (err) {
console.log(err);
throw handleSettingsError(err);
}
},

View file

@ -7,6 +7,7 @@
import { schema } from '@kbn/config-schema';
import { RequestHandlerContext } from 'kibana/server';
import { estypes } from '@elastic/elasticsearch';
import {
INDEX_PATTERN_ELASTICSEARCH,
INDEX_PATTERN_KIBANA,
@ -44,11 +45,11 @@ const queryBody = {
};
const checkLatestMonitoringIsLegacy = async (context: RequestHandlerContext, index: string) => {
const { client: esClient } = context.core.elasticsearch.legacy;
const result = await esClient.callAsCurrentUser('search', {
const client = context.core.elasticsearch.client.asCurrentUser;
const { body: result } = await client.search<estypes.SearchResponse<unknown>>({
index,
body: queryBody,
});
} as estypes.SearchRequest);
const { aggregations } = result;
const counts = {
@ -62,7 +63,7 @@ const checkLatestMonitoringIsLegacy = async (context: RequestHandlerContext, ind
const {
types: { buckets },
} = aggregations;
} = aggregations as { types: { buckets: Array<{ key: string }> } };
counts.mbIndicesCount = buckets.filter(({ key }: { key: string }) => key.includes('-mb-')).length;
counts.legacyIndicesCount = buckets.length - counts.mbIndicesCount;

View file

@ -5,20 +5,50 @@
* 2.0.
*/
import { CoreSetup, ILegacyCustomClusterClient, Logger } from 'kibana/server';
import {
CoreSetup,
ElasticsearchClient,
Logger,
SharedGlobalConfig,
PluginInitializerContext,
} from 'kibana/server';
import url from 'url';
import { CloudSetup } from '../../cloud/server';
import { estypes } from '@elastic/elasticsearch';
import { MonitoringConfig } from './config';
import { PluginsSetup } from './types';
import { mbSafeQuery } from './lib/mb_safe_query';
type GetLogger = (...scopes: string[]) => Logger;
interface InitSetupOptions {
initializerContext: PluginInitializerContext;
coreSetup: CoreSetup;
config: MonitoringConfig;
getLogger: GetLogger;
log: Logger;
legacyConfig: SharedGlobalConfig;
setupPlugins: PluginsSetup;
}
/**
 * Endpoint names accepted by the legacy-cluster shim.
 *
 * The known monitoring endpoints are listed as literals so call sites get
 * autocomplete and exhaustive narrowing; `(string & {})` (instead of a bare
 * `string`, which would absorb the literals and collapse the whole union to
 * `string`) keeps the type open to any other endpoint name while preserving
 * the literal members.
 */
export type EndpointTypes =
  | 'search'
  | 'msearch'
  | 'transport.request'
  | 'cluster.putSettings'
  | 'cluster.getSettings'
  | (string & {});
export type ClientParams = estypes.SearchRequest | undefined;
interface IAppGlobals {
url: string;
isCloud: boolean;
monitoringCluster: ILegacyCustomClusterClient;
config: MonitoringConfig;
getLogger: GetLogger;
getKeyStoreValue: (key: string, storeValueMethod?: () => unknown) => unknown;
getLegacyClusterShim: (
client: ElasticsearchClient,
endpoint: EndpointTypes,
params: ClientParams
) => any;
}
interface KeyStoreData {
@ -37,22 +67,35 @@ const getKeyStoreValue = (key: string, storeValueMethod?: () => unknown) => {
export class Globals {
private static _app: IAppGlobals;
public static init(
coreSetup: CoreSetup,
cloud: CloudSetup | undefined,
monitoringCluster: ILegacyCustomClusterClient,
config: MonitoringConfig,
getLogger: GetLogger
) {
public static init(options: InitSetupOptions) {
const { coreSetup, setupPlugins, config, getLogger } = options;
const getLegacyClusterShim = async (
client: ElasticsearchClient,
endpoint: EndpointTypes,
params: ClientParams
): Promise<estypes.SearchResponse> =>
await mbSafeQuery(async () => {
const endpointMap: { [key: string]: (params: any) => any } = {
search: (p) => client.search(p),
msearch: (p) => client.msearch(p),
'transport.request': (p) => client.transport.request(p),
'cluster.getSettings': (p) => client.cluster.getSettings(p),
'cluster.putSettings': (p) => client.cluster.putSettings(p),
};
const { body } = await endpointMap[endpoint](params);
return body;
});
const { protocol, hostname, port } = coreSetup.http.getServerInfo();
const pathname = coreSetup.http.basePath.serverBasePath;
Globals._app = {
url: url.format({ protocol, hostname, port, pathname }),
isCloud: cloud?.isCloudEnabled || false,
monitoringCluster,
isCloud: setupPlugins.cloud?.isCloudEnabled || false,
config,
getLogger,
getKeyStoreValue,
getLegacyClusterShim,
};
}
@ -64,4 +107,6 @@ export class Globals {
}
return Globals._app;
}
public static stop() {}
}

View file

@ -5,6 +5,7 @@
* 2.0.
*/
import { ElasticsearchClient } from 'kibana/server';
import sinon from 'sinon';
import { getStackStats, getAllStats, handleAllStats } from './get_all_stats';
import { ESClusterStats } from './get_es_stats';
@ -13,7 +14,11 @@ import { LogstashStatsByClusterUuid } from './get_logstash_stats';
describe('get_all_stats', () => {
const timestamp = Date.now();
const callCluster = sinon.stub();
const searchMock = sinon.stub();
const callCluster = ({ search: searchMock } as unknown) as ElasticsearchClient;
afterEach(() => {
searchMock.reset();
});
const esClusters = [
{ cluster_uuid: 'a' },
@ -157,18 +162,20 @@ describe('get_all_stats', () => {
},
];
callCluster
.withArgs('search')
searchMock
.onCall(0)
.returns(Promise.resolve(esStatsResponse))
.returns(Promise.resolve({ body: esStatsResponse }))
.onCall(1)
.returns(Promise.resolve(kibanaStatsResponse))
.returns(Promise.resolve({ body: kibanaStatsResponse }))
.onCall(2)
.returns(Promise.resolve(logstashStatsResponse))
.returns(Promise.resolve({ body: logstashStatsResponse }))
.returns(Promise.resolve({ body: logstashStatsResponse }))
.onCall(3)
.returns(Promise.resolve({})) // Beats stats
.returns(Promise.resolve({ body: {} })) // Beats stats
.onCall(4)
.returns(Promise.resolve({})); // Beats state
.returns(Promise.resolve({ body: {} })) // Beats state
.onCall(5)
.returns(Promise.resolve({ body: {} })); // Logstash state
expect(await getAllStats(['a'], callCluster, timestamp, 1)).toStrictEqual(allClusters);
});
@ -178,7 +185,7 @@ describe('get_all_stats', () => {
aggregations: { cluster_uuids: { buckets: [] } },
};
callCluster.withArgs('search').returns(Promise.resolve(clusterUuidsResponse));
searchMock.returns(Promise.resolve({ body: clusterUuidsResponse }));
expect(await getAllStats([], callCluster, timestamp, 1)).toStrictEqual([]);
});

View file

@ -9,7 +9,7 @@ import { set } from '@elastic/safer-lodash-set';
import { get, merge } from 'lodash';
import moment from 'moment';
import { LegacyAPICaller } from 'kibana/server';
import { ElasticsearchClient } from 'kibana/server';
import {
LOGSTASH_SYSTEM_ID,
KIBANA_SYSTEM_ID,
@ -28,7 +28,7 @@ import { getLogstashStats, LogstashStatsByClusterUuid } from './get_logstash_sta
*/
export async function getAllStats(
clusterUuids: string[],
callCluster: LegacyAPICaller, // TODO: To be changed to the new ES client when the plugin migrates
callCluster: ElasticsearchClient,
timestamp: number,
maxBucketSize: number
) {

View file

@ -7,6 +7,7 @@
import { fetchBeatsStats, processResults } from './get_beats_stats';
import sinon from 'sinon';
import { ElasticsearchClient } from 'kibana/server';
// eslint-disable-next-line @typescript-eslint/no-var-requires
const beatsStatsResultSet = require('./__mocks__/fixtures/beats_stats_results');
@ -23,40 +24,41 @@ describe('Get Beats Stats', () => {
const clusterUuids = ['aCluster', 'bCluster', 'cCluster'];
const start = new Date().toISOString();
const end = new Date().toISOString();
let callCluster = sinon.stub();
const searchMock = sinon.stub();
const callCluster = ({ search: searchMock } as unknown) as ElasticsearchClient;
beforeEach(() => {
const getStub = { get: sinon.stub() };
getStub.get.withArgs('xpack.monitoring.beats.index_pattern').returns('beats-indices-*');
callCluster = sinon.stub();
searchMock.reset();
});
it('should set `from: 0, to: 10000` in the query', async () => {
searchMock.returns(Promise.resolve({ body: {} }));
await fetchBeatsStats(callCluster, clusterUuids, start, end, {} as any);
const { args } = callCluster.firstCall;
const [api, { body }] = args;
const { args } = searchMock.firstCall;
const [{ body }] = args;
expect(api).toEqual('search');
expect(body.from).toEqual(0);
expect(body.size).toEqual(10000);
});
it('should set `from: 10000, from: 10000` in the query', async () => {
searchMock.returns(Promise.resolve({ body: {} }));
await fetchBeatsStats(callCluster, clusterUuids, start, end, { page: 1 } as any);
const { args } = callCluster.firstCall;
const [api, { body }] = args;
const { args } = searchMock.firstCall;
const [{ body }] = args;
expect(api).toEqual('search');
expect(body.from).toEqual(10000);
expect(body.size).toEqual(10000);
});
it('should set `from: 20000, from: 10000` in the query', async () => {
searchMock.returns(Promise.resolve({ body: {} }));
await fetchBeatsStats(callCluster, clusterUuids, start, end, { page: 2 } as any);
const { args } = callCluster.firstCall;
const [api, { body }] = args;
const { args } = searchMock.firstCall;
const [{ body }] = args;
expect(api).toEqual('search');
expect(body.from).toEqual(20000);
expect(body.size).toEqual(10000);
});

View file

@ -7,7 +7,8 @@
import { get } from 'lodash';
import { SearchResponse } from 'elasticsearch';
import { LegacyAPICaller } from 'kibana/server';
import { ElasticsearchClient } from 'kibana/server';
import { estypes } from '@elastic/elasticsearch';
import { createQuery } from './create_query';
import { INDEX_PATTERN_BEATS } from '../../common/constants';
@ -319,17 +320,17 @@ export function processResults(
* @return {Promise}
*/
async function fetchBeatsByType(
callCluster: LegacyAPICaller,
callCluster: ElasticsearchClient,
clusterUuids: string[],
start: string,
end: string,
{ page = 0, ...options }: { page?: number } & BeatsProcessOptions,
type: string
): Promise<void> {
const params = {
const params: estypes.SearchRequest = {
index: INDEX_PATTERN_BEATS,
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
'hits.hits._source.cluster_uuid',
'hits.hits._source.type',
'hits.hits._source.beats_stats.beat.version',
@ -353,7 +354,7 @@ async function fetchBeatsByType(
},
},
],
}),
}) as estypes.QueryDslQueryContainer,
from: page * HITS_SIZE,
collapse: { field: `${type}.beat.uuid` },
sort: [{ [`${type}.timestamp`]: { order: 'desc', unmapped_type: 'long' } }],
@ -361,11 +362,11 @@ async function fetchBeatsByType(
},
};
const results = await callCluster<SearchResponse<BeatsStats>>('search', params);
const { body: results } = await callCluster.search(params);
const hitsLength = results?.hits?.hits.length || 0;
if (hitsLength > 0) {
// further augment the clusters object with more stats
processResults(results, options);
processResults(results as SearchResponse<BeatsStats>, options);
if (hitsLength === HITS_SIZE) {
// call recursively
@ -383,7 +384,7 @@ async function fetchBeatsByType(
}
export async function fetchBeatsStats(
callCluster: LegacyAPICaller,
callCluster: ElasticsearchClient,
clusterUuids: string[],
start: string,
end: string,
@ -393,7 +394,7 @@ export async function fetchBeatsStats(
}
export async function fetchBeatsStates(
callCluster: LegacyAPICaller,
callCluster: ElasticsearchClient,
clusterUuids: string[],
start: string,
end: string,
@ -411,7 +412,7 @@ export interface BeatsStatsByClusterUuid {
* @return {Object} - Beats stats in an object keyed by the cluster UUIDs
*/
export async function getBeatsStats(
callCluster: LegacyAPICaller,
callCluster: ElasticsearchClient,
clusterUuids: string[],
start: string,
end: string

View file

@ -5,6 +5,7 @@
* 2.0.
*/
import { ElasticsearchClient } from 'kibana/server';
import sinon from 'sinon';
import {
getClusterUuids,
@ -13,40 +14,52 @@ import {
} from './get_cluster_uuids';
describe('get_cluster_uuids', () => {
const callCluster = sinon.stub();
const searchMock = sinon.stub();
const callCluster = ({ search: searchMock } as unknown) as ElasticsearchClient;
afterEach(() => {
searchMock.reset();
});
const response = {
aggregations: {
cluster_uuids: {
buckets: [{ key: 'abc' }, { key: 'xyz' }, { key: '123' }],
body: {
aggregations: {
cluster_uuids: {
buckets: [{ key: 'abc' }, { key: 'xyz' }, { key: '123' }],
},
},
},
};
const expectedUuids = response.aggregations.cluster_uuids.buckets.map((bucket) => bucket.key);
const expectedUuids = response.body.aggregations.cluster_uuids.buckets.map(
(bucket) => bucket.key
);
const timestamp = Date.now();
describe('getClusterUuids', () => {
it('returns cluster UUIDs', async () => {
callCluster.withArgs('search').returns(Promise.resolve(response));
searchMock.returns(Promise.resolve(response));
expect(await getClusterUuids(callCluster, timestamp, 1)).toStrictEqual(expectedUuids);
});
});
describe('fetchClusterUuids', () => {
it('searches for clusters', async () => {
callCluster.returns(Promise.resolve(response));
expect(await fetchClusterUuids(callCluster, timestamp, 1)).toStrictEqual(response);
searchMock.returns(Promise.resolve(response));
expect(await fetchClusterUuids(callCluster, timestamp, 1)).toStrictEqual(response.body);
});
});
describe('handleClusterUuidsResponse', () => {
// filterPath makes it easy to ignore anything unexpected because it will come back empty
// filter_path makes it easy to ignore anything unexpected because it will come back empty
it('handles unexpected response', () => {
const clusterUuids = handleClusterUuidsResponse({});
expect(clusterUuids.length).toStrictEqual(0);
});
it('handles valid response', () => {
const clusterUuids = handleClusterUuidsResponse(response);
const clusterUuids = handleClusterUuidsResponse(response.body);
expect(clusterUuids).toStrictEqual(expectedUuids);
});

View file

@ -7,7 +7,8 @@
import { get } from 'lodash';
import moment from 'moment';
import { LegacyAPICaller } from 'kibana/server';
import { ElasticsearchClient } from 'kibana/server';
import { estypes } from '@elastic/elasticsearch';
import { createQuery } from './create_query';
import {
INDEX_PATTERN_ELASTICSEARCH,
@ -18,7 +19,7 @@ import {
* Get a list of Cluster UUIDs that exist within the specified timespan.
*/
export async function getClusterUuids(
callCluster: LegacyAPICaller, // TODO: To be changed to the new ES client when the plugin migrates
callCluster: ElasticsearchClient,
timestamp: number,
maxBucketSize: number
) {
@ -30,7 +31,7 @@ export async function getClusterUuids(
* Fetch the aggregated Cluster UUIDs from the monitoring cluster.
*/
export async function fetchClusterUuids(
callCluster: LegacyAPICaller,
callCluster: ElasticsearchClient,
timestamp: number,
maxBucketSize: number
) {
@ -38,13 +39,13 @@ export async function fetchClusterUuids(
const end = moment(timestamp).toISOString();
const params = {
const params: estypes.SearchRequest = {
index: INDEX_PATTERN_ELASTICSEARCH,
size: 0,
ignoreUnavailable: true,
filterPath: 'aggregations.cluster_uuids.buckets.key',
ignore_unavailable: true,
filter_path: 'aggregations.cluster_uuids.buckets.key',
body: {
query: createQuery({ type: 'cluster_stats', start, end }),
query: createQuery({ type: 'cluster_stats', start, end }) as estypes.QueryDslQueryContainer,
aggs: {
cluster_uuids: {
terms: {
@ -56,7 +57,8 @@ export async function fetchClusterUuids(
},
};
return await callCluster('search', params);
const { body: response } = await callCluster.search(params);
return response;
}
/**

View file

@ -5,6 +5,7 @@
* 2.0.
*/
import { ElasticsearchClient } from 'kibana/server';
import sinon from 'sinon';
import {
fetchElasticsearchStats,
@ -13,8 +14,9 @@ import {
} from './get_es_stats';
describe('get_es_stats', () => {
const callWith = sinon.stub();
const response = {
const searchMock = sinon.stub();
const client = ({ search: searchMock } as unknown) as ElasticsearchClient;
const body = {
hits: {
hits: [
{ _id: 'abc', _source: { cluster_uuid: 'abc' } },
@ -23,15 +25,15 @@ describe('get_es_stats', () => {
],
},
};
const expectedClusters = response.hits.hits.map((hit) => hit._source);
const expectedClusters = body.hits.hits.map((hit) => hit._source);
const clusterUuids = expectedClusters.map((cluster) => cluster.cluster_uuid);
const maxBucketSize = 1;
describe('getElasticsearchStats', () => {
it('returns clusters', async () => {
callWith.withArgs('search').returns(Promise.resolve(response));
searchMock.returns(Promise.resolve({ body }));
expect(await getElasticsearchStats(callWith, clusterUuids, maxBucketSize)).toStrictEqual(
expect(await getElasticsearchStats(client, clusterUuids, maxBucketSize)).toStrictEqual(
expectedClusters
);
});
@ -39,16 +41,16 @@ describe('get_es_stats', () => {
describe('fetchElasticsearchStats', () => {
it('searches for clusters', async () => {
callWith.returns(response);
searchMock.returns({ body });
expect(await fetchElasticsearchStats(callWith, clusterUuids, maxBucketSize)).toStrictEqual(
response
expect(await fetchElasticsearchStats(client, clusterUuids, maxBucketSize)).toStrictEqual(
body
);
});
});
describe('handleElasticsearchStats', () => {
// filterPath makes it easy to ignore anything unexpected because it will come back empty
// filter_path makes it easy to ignore anything unexpected because it will come back empty
it('handles unexpected response', () => {
const clusters = handleElasticsearchStats({} as any);
@ -56,7 +58,7 @@ describe('get_es_stats', () => {
});
it('handles valid response', () => {
const clusters = handleElasticsearchStats(response as any);
const clusters = handleElasticsearchStats(body as any);
expect(clusters).toStrictEqual(expectedClusters);
});

View file

@ -6,7 +6,8 @@
*/
import { SearchResponse } from 'elasticsearch';
import { LegacyAPICaller } from 'kibana/server';
import { ElasticsearchClient } from 'kibana/server';
import { estypes } from '@elastic/elasticsearch';
import { INDEX_PATTERN_ELASTICSEARCH } from '../../common/constants';
/**
@ -17,7 +18,7 @@ import { INDEX_PATTERN_ELASTICSEARCH } from '../../common/constants';
* @param {Array} clusterUuids The string Cluster UUIDs to fetch details for
*/
export async function getElasticsearchStats(
callCluster: LegacyAPICaller,
callCluster: ElasticsearchClient,
clusterUuids: string[],
maxBucketSize: number
) {
@ -34,16 +35,16 @@ export async function getElasticsearchStats(
*
* Returns the response for the aggregations to fetch details for the product.
*/
export function fetchElasticsearchStats(
callCluster: LegacyAPICaller,
export async function fetchElasticsearchStats(
callCluster: ElasticsearchClient,
clusterUuids: string[],
maxBucketSize: number
) {
const params = {
const params: estypes.SearchRequest = {
index: INDEX_PATTERN_ELASTICSEARCH,
size: maxBucketSize,
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
'hits.hits._source.cluster_uuid',
'hits.hits._source.timestamp',
'hits.hits._source.cluster_name',
@ -69,7 +70,8 @@ export function fetchElasticsearchStats(
},
};
return callCluster('search', params);
const { body: response } = await callCluster.search(params);
return response as SearchResponse<ESClusterStats>;
}
export interface ESClusterStats {

View file

@ -5,6 +5,7 @@
* 2.0.
*/
import { ElasticsearchClient } from 'kibana/server';
import sinon from 'sinon';
import {
fetchHighLevelStats,
@ -13,12 +14,13 @@ import {
} from './get_high_level_stats';
describe('get_high_level_stats', () => {
const callWith = sinon.stub();
const searchMock = sinon.stub();
const callCluster = ({ search: searchMock } as unknown) as ElasticsearchClient;
const product = 'xyz';
const cloudName = 'bare-metal';
const start = new Date().toISOString();
const end = new Date().toISOString();
const response = {
const body = {
hits: {
hits: [
{
@ -232,26 +234,26 @@ describe('get_high_level_stats', () => {
describe('getHighLevelStats', () => {
it('returns clusters', async () => {
callWith.withArgs('search').returns(Promise.resolve(response));
searchMock.returns(Promise.resolve({ body }));
expect(
await getHighLevelStats(callWith, clusterUuids, start, end, product, maxBucketSize)
await getHighLevelStats(callCluster, clusterUuids, start, end, product, maxBucketSize)
).toStrictEqual(expectedClusters);
});
});
describe('fetchHighLevelStats', () => {
it('searches for clusters', async () => {
callWith.returns(Promise.resolve(response));
searchMock.returns(Promise.resolve({ body }));
expect(
await fetchHighLevelStats(callWith, clusterUuids, start, end, product, maxBucketSize)
).toStrictEqual(response);
await fetchHighLevelStats(callCluster, clusterUuids, start, end, product, maxBucketSize)
).toStrictEqual(body);
});
});
describe('handleHighLevelStatsResponse', () => {
// filterPath makes it easy to ignore anything unexpected because it will come back empty
// filter_path makes it easy to ignore anything unexpected because it will come back empty
it('handles unexpected response', () => {
const clusters = handleHighLevelStatsResponse({} as any, product);
@ -259,7 +261,7 @@ describe('get_high_level_stats', () => {
});
it('handles valid response', () => {
const clusters = handleHighLevelStatsResponse(response as any, product);
const clusters = handleHighLevelStatsResponse(body as any, product);
expect(clusters).toStrictEqual(expectedClusters);
});

View file

@ -7,7 +7,8 @@
import { get } from 'lodash';
import { SearchResponse } from 'elasticsearch';
import { LegacyAPICaller } from 'kibana/server';
import { ElasticsearchClient } from 'kibana/server';
import { estypes } from '@elastic/elasticsearch';
import { createQuery } from './create_query';
import {
INDEX_PATTERN_KIBANA,
@ -248,7 +249,7 @@ function getIndexPatternForStackProduct(product: string) {
* Returns an object keyed by the cluster UUIDs to make grouping easier.
*/
export async function getHighLevelStats(
callCluster: LegacyAPICaller,
callCluster: ElasticsearchClient,
clusterUuids: string[],
start: string,
end: string,
@ -269,7 +270,7 @@ export async function getHighLevelStats(
export async function fetchHighLevelStats<
T extends { cluster_uuid?: string } = { cluster_uuid?: string }
>(
callCluster: LegacyAPICaller,
callCluster: ElasticsearchClient,
clusterUuids: string[],
start: string,
end: string,
@ -300,14 +301,11 @@ export async function fetchHighLevelStats<
filters.push(kibanaFilter);
}
const params = {
index: getIndexPatternForStackProduct(product),
const params: estypes.SearchRequest = {
index: getIndexPatternForStackProduct(product) as string,
size: maxBucketSize,
headers: {
'X-QUERY-SOURCE': TELEMETRY_QUERY_SOURCE,
},
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
'hits.hits._source.cluster_uuid',
`hits.hits._source.${product}_stats.${product}.version`,
`hits.hits._source.${product}_stats.os`,
@ -325,7 +323,7 @@ export async function fetchHighLevelStats<
end,
type: `${product}_stats`,
filters,
}),
}) as estypes.QueryDslQueryContainer,
collapse: {
// a more ideal field would be the concatenation of the uuid + transport address for duped UUIDs (copied installations)
field: `${product}_stats.${product}.uuid`,
@ -334,7 +332,12 @@ export async function fetchHighLevelStats<
},
};
return callCluster('search', params);
const { body: response } = await callCluster.search(params, {
headers: {
'X-QUERY-SOURCE': TELEMETRY_QUERY_SOURCE,
},
});
return response as SearchResponse<T>;
}
/**

View file

@ -8,7 +8,7 @@
import moment from 'moment';
import { isEmpty } from 'lodash';
import { SearchResponse } from 'elasticsearch';
import { LegacyAPICaller } from 'kibana/server';
import { ElasticsearchClient } from 'kibana/server';
import { KIBANA_SYSTEM_ID, TELEMETRY_COLLECTION_INTERVAL } from '../../common/constants';
import {
fetchHighLevelStats,
@ -183,7 +183,7 @@ export function ensureTimeSpan(
* specialized usage data that comes with kibana stats (kibana_stats.usage).
*/
export async function getKibanaStats(
callCluster: LegacyAPICaller,
callCluster: ElasticsearchClient,
clusterUuids: string[],
start: string,
end: string,

View file

@ -5,12 +5,14 @@
* 2.0.
*/
import { ElasticsearchClient } from 'kibana/server';
import sinon from 'sinon';
import { getLicenses, handleLicenses, fetchLicenses } from './get_licenses';
describe('get_licenses', () => {
const callWith = sinon.stub();
const response = {
const searchMock = sinon.stub();
const client = ({ search: searchMock } as unknown) as ElasticsearchClient;
const body = {
hits: {
hits: [
{ _id: 'abc', _source: { cluster_uuid: 'abc', license: { type: 'basic' } } },
@ -19,7 +21,7 @@ describe('get_licenses', () => {
],
},
};
const expectedClusters = response.hits.hits.map((hit) => hit._source);
const expectedClusters = body.hits.hits.map((hit) => hit._source);
const clusterUuids = expectedClusters.map((cluster) => cluster.cluster_uuid);
const expectedLicenses = {
abc: { type: 'basic' },
@ -29,22 +31,22 @@ describe('get_licenses', () => {
describe('getLicenses', () => {
it('returns clusters', async () => {
callWith.withArgs('search').returns(Promise.resolve(response));
searchMock.returns(Promise.resolve({ body }));
expect(await getLicenses(clusterUuids, callWith, 1)).toStrictEqual(expectedLicenses);
expect(await getLicenses(clusterUuids, client, 1)).toStrictEqual(expectedLicenses);
});
});
describe('fetchLicenses', () => {
it('searches for clusters', async () => {
callWith.returns(response);
searchMock.returns({ body });
expect(await fetchLicenses(callWith, clusterUuids, 1)).toStrictEqual(response);
expect(await fetchLicenses(client, clusterUuids, 1)).toStrictEqual(body);
});
});
describe('handleLicenses', () => {
// filterPath makes it easy to ignore anything unexpected because it will come back empty
// filter_path makes it easy to ignore anything unexpected because it will come back empty
it('handles unexpected response', () => {
const clusters = handleLicenses({} as any);
@ -52,7 +54,7 @@ describe('get_licenses', () => {
});
it('handles valid response', () => {
const clusters = handleLicenses(response as any);
const clusters = handleLicenses(body as any);
expect(clusters).toStrictEqual(expectedLicenses);
});

View file

@ -6,7 +6,8 @@
*/
import { SearchResponse } from 'elasticsearch';
import { LegacyAPICaller } from 'kibana/server';
import { ElasticsearchClient } from 'kibana/server';
import { estypes } from '@elastic/elasticsearch';
import { ESLicense } from '../../../telemetry_collection_xpack/server';
import { INDEX_PATTERN_ELASTICSEARCH } from '../../common/constants';
@ -15,7 +16,7 @@ import { INDEX_PATTERN_ELASTICSEARCH } from '../../common/constants';
*/
export async function getLicenses(
clusterUuids: string[],
callCluster: LegacyAPICaller, // TODO: To be changed to the new ES client when the plugin migrates
callCluster: ElasticsearchClient, // TODO: To be changed to the new ES client when the plugin migrates
maxBucketSize: number
): Promise<{ [clusterUuid: string]: ESLicense | undefined }> {
const response = await fetchLicenses(callCluster, clusterUuids, maxBucketSize);
@ -31,16 +32,16 @@ export async function getLicenses(
*
* Returns the response for the aggregations to fetch details for the product.
*/
export function fetchLicenses(
callCluster: LegacyAPICaller,
export async function fetchLicenses(
callCluster: ElasticsearchClient,
clusterUuids: string[],
maxBucketSize: number
) {
const params = {
const params: estypes.SearchRequest = {
index: INDEX_PATTERN_ELASTICSEARCH,
size: maxBucketSize,
ignoreUnavailable: true,
filterPath: ['hits.hits._source.cluster_uuid', 'hits.hits._source.license'],
ignore_unavailable: true,
filter_path: ['hits.hits._source.cluster_uuid', 'hits.hits._source.license'],
body: {
query: {
bool: {
@ -59,7 +60,8 @@ export function fetchLicenses(
},
};
return callCluster('search', params);
const { body: response } = await callCluster.search(params);
return response as SearchResponse<ESClusterStatsWithLicense>;
}
export interface ESClusterStatsWithLicense {

View file

@ -12,6 +12,7 @@ import {
processLogstashStateResults,
} from './get_logstash_stats';
import sinon from 'sinon';
import { ElasticsearchClient } from 'kibana/server';
// eslint-disable-next-line @typescript-eslint/no-var-requires
const logstashStatsResultSet = require('./__mocks__/fixtures/logstash_stats_results');
@ -33,10 +34,15 @@ const getBaseOptions = () => ({
describe('Get Logstash Stats', () => {
const clusterUuids = ['aCluster', 'bCluster', 'cCluster'];
let callCluster = sinon.stub();
const searchMock = sinon.stub();
const callCluster = ({ search: searchMock } as unknown) as ElasticsearchClient;
beforeEach(() => {
callCluster = sinon.stub();
searchMock.returns(Promise.resolve({ body: {} }));
});
afterEach(() => {
searchMock.reset();
});
describe('fetchLogstashState', () => {
@ -63,17 +69,15 @@ describe('Get Logstash Stats', () => {
};
await fetchLogstashState(callCluster, clusterUuid, ephemeralIds, {} as any);
const { args } = callCluster.firstCall;
const [api, { body }] = args;
expect(api).toEqual('search');
const { args } = searchMock.firstCall;
const [{ body }] = args;
expect(body.query).toEqual(expected);
});
it('should set `from: 0, to: 10000` in the query', async () => {
await fetchLogstashState(callCluster, clusterUuid, ephemeralIds, {} as any);
const { args } = callCluster.firstCall;
const [api, { body }] = args;
expect(api).toEqual('search');
const { args } = searchMock.firstCall;
const [{ body }] = args;
expect(body.from).toEqual(0);
expect(body.size).toEqual(10000);
});
@ -82,10 +86,9 @@ describe('Get Logstash Stats', () => {
await fetchLogstashState(callCluster, clusterUuid, ephemeralIds, {
page: 1,
} as any);
const { args } = callCluster.firstCall;
const [api, { body }] = args;
const { args } = searchMock.firstCall;
const [{ body }] = args;
expect(api).toEqual('search');
expect(body.from).toEqual(10000);
expect(body.size).toEqual(10000);
});
@ -94,10 +97,9 @@ describe('Get Logstash Stats', () => {
await fetchLogstashState(callCluster, clusterUuid, ephemeralIds, {
page: 2,
} as any);
const { args } = callCluster.firstCall;
const [api, { body }] = args;
const { args } = searchMock.firstCall;
const [{ body }] = args;
expect(api).toEqual('search');
expect(body.from).toEqual(20000);
expect(body.size).toEqual(10000);
});
@ -106,30 +108,27 @@ describe('Get Logstash Stats', () => {
describe('fetchLogstashStats', () => {
it('should set `from: 0, to: 10000` in the query', async () => {
await fetchLogstashStats(callCluster, clusterUuids, {} as any);
const { args } = callCluster.firstCall;
const [api, { body }] = args;
const { args } = searchMock.firstCall;
const [{ body }] = args;
expect(api).toEqual('search');
expect(body.from).toEqual(0);
expect(body.size).toEqual(10000);
});
it('should set `from: 10000, to: 10000` in the query', async () => {
await fetchLogstashStats(callCluster, clusterUuids, { page: 1 } as any);
const { args } = callCluster.firstCall;
const [api, { body }] = args;
const { args } = searchMock.firstCall;
const [{ body }] = args;
expect(api).toEqual('search');
expect(body.from).toEqual(10000);
expect(body.size).toEqual(10000);
});
it('should set `from: 20000, to: 10000` in the query', async () => {
await fetchLogstashStats(callCluster, clusterUuids, { page: 2 } as any);
const { args } = callCluster.firstCall;
const [api, { body }] = args;
const { args } = searchMock.firstCall;
const [{ body }] = args;
expect(api).toEqual('search');
expect(body.from).toEqual(20000);
expect(body.size).toEqual(10000);
});

View file

@ -6,7 +6,8 @@
*/
import { SearchResponse } from 'elasticsearch';
import { LegacyAPICaller } from 'kibana/server';
import { ElasticsearchClient } from 'kibana/server';
import { estypes } from '@elastic/elasticsearch';
import { createQuery } from './create_query';
import { mapToList } from './get_high_level_stats';
import { incrementByKey } from './get_high_level_stats';
@ -261,17 +262,14 @@ export function processLogstashStateResults(
}
export async function fetchLogstashStats(
callCluster: LegacyAPICaller,
callCluster: ElasticsearchClient,
clusterUuids: string[],
{ page = 0, ...options }: { page?: number } & LogstashProcessOptions
): Promise<void> {
const params = {
headers: {
'X-QUERY-SOURCE': TELEMETRY_QUERY_SOURCE,
},
const params: estypes.SearchRequest = {
index: INDEX_PATTERN_LOGSTASH,
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
'hits.hits._source.cluster_uuid',
'hits.hits._source.type',
'hits.hits._source.source_node',
@ -295,7 +293,7 @@ export async function fetchLogstashStats(
},
},
],
}),
}) as estypes.QueryDslQueryContainer,
from: page * HITS_SIZE,
collapse: { field: 'logstash_stats.logstash.uuid' },
sort: [{ ['logstash_stats.timestamp']: { order: 'desc', unmapped_type: 'long' } }],
@ -303,12 +301,16 @@ export async function fetchLogstashStats(
},
};
const results = await callCluster<SearchResponse<LogstashStats>>('search', params);
const { body: results } = await callCluster.search(params, {
headers: {
'X-QUERY-SOURCE': TELEMETRY_QUERY_SOURCE,
},
});
const hitsLength = results?.hits?.hits.length || 0;
if (hitsLength > 0) {
// further augment the clusters object with more stats
processStatsResults(results, options);
processStatsResults(results as SearchResponse<LogstashStats>, options);
if (hitsLength === HITS_SIZE) {
// call recursively
@ -325,18 +327,15 @@ export async function fetchLogstashStats(
}
export async function fetchLogstashState(
callCluster: LegacyAPICaller,
callCluster: ElasticsearchClient,
clusterUuid: string,
ephemeralIds: string[],
{ page = 0, ...options }: { page?: number } & LogstashProcessOptions
): Promise<void> {
const params = {
headers: {
'X-QUERY-SOURCE': TELEMETRY_QUERY_SOURCE,
},
const params: estypes.SearchRequest = {
index: INDEX_PATTERN_LOGSTASH,
ignoreUnavailable: true,
filterPath: [
ignore_unavailable: true,
filter_path: [
'hits.hits._source.logstash_state.pipeline.batch_size',
'hits.hits._source.logstash_state.pipeline.workers',
'hits.hits._source.logstash_state.pipeline.representation.graph.vertices.config_name',
@ -355,7 +354,7 @@ export async function fetchLogstashState(
},
},
],
}),
}) as estypes.QueryDslQueryContainer,
from: page * HITS_SIZE,
collapse: { field: 'logstash_state.pipeline.ephemeral_id' },
sort: [{ ['timestamp']: { order: 'desc', unmapped_type: 'long' } }],
@ -363,11 +362,16 @@ export async function fetchLogstashState(
},
};
const results = await callCluster<SearchResponse<LogstashState>>('search', params);
const { body: results } = await callCluster.search<SearchResponse<LogstashState>>(params, {
headers: {
'X-QUERY-SOURCE': TELEMETRY_QUERY_SOURCE,
},
});
const hitsLength = results?.hits?.hits.length || 0;
if (hitsLength > 0) {
// further augment the clusters object with more stats
processLogstashStateResults(results, clusterUuid, options);
processLogstashStateResults(results as SearchResponse<LogstashState>, clusterUuid, options);
if (hitsLength === HITS_SIZE) {
// call recursively
@ -392,7 +396,7 @@ export interface LogstashStatsByClusterUuid {
* @return {Object} - Logstash stats in an object keyed by the cluster UUIDs
*/
export async function getLogstashStats(
callCluster: LegacyAPICaller,
callCluster: ElasticsearchClient,
clusterUuids: string[]
): Promise<LogstashStatsByClusterUuid> {
const options: LogstashProcessOptions = {

View file

@ -5,7 +5,7 @@
* 2.0.
*/
import type { ILegacyClusterClient } from 'kibana/server';
import type { IClusterClient } from 'kibana/server';
import type { UsageCollectionSetup } from 'src/plugins/usage_collection/server';
import type { UsageStatsPayload } from '../../../../../src/plugins/telemetry_collection_manager/server';
import type { LogstashBaseStats } from './get_logstash_stats';
@ -31,7 +31,7 @@ interface MonitoringTelemetryUsage {
export function registerMonitoringTelemetryCollection(
usageCollection: UsageCollectionSetup,
legacyEsClient: ILegacyClusterClient,
getClient: () => IClusterClient,
maxBucketSize: number
) {
const monitoringStatsCollector = usageCollection.makeStatsCollector<
@ -137,15 +137,13 @@ export function registerMonitoringTelemetryCollection(
},
},
},
fetch: async ({ kibanaRequest }) => {
fetch: async ({ kibanaRequest, esClient }) => {
const timestamp = Date.now(); // Collect the telemetry from the monitoring indices for this moment.
// NOTE: Usually, the monitoring indices index stats for each product every 10s (by default).
// However, some data may be delayed up-to 24h because monitoring only collects extended Kibana stats in that interval
// to avoid overloading of the system when retrieving data from the collectors (that delay is dealt with in the Kibana Stats getter inside the `getAllStats` method).
// By 8.x, we expect to stop collecting the Kibana extended stats and keep only the monitoring-related metrics.
const callCluster = kibanaRequest
? legacyEsClient.asScoped(kibanaRequest).callAsCurrentUser
: legacyEsClient.callAsInternalUser;
const callCluster = kibanaRequest ? esClient : getClient().asInternalUser;
const clusterDetails = await getClusterUuids(callCluster, timestamp, maxBucketSize);
const [licenses, stats] = await Promise.all([
getLicenses(clusterDetails, callCluster, maxBucketSize),

View file

@ -8,9 +8,8 @@
import { Observable } from 'rxjs';
import type {
IRouter,
ILegacyClusterClient,
Logger,
ILegacyCustomClusterClient,
ICustomClusterClient,
RequestHandlerContext,
ElasticsearchClient,
} from 'kibana/server';
@ -71,7 +70,7 @@ export interface MonitoringCoreConfig {
}
export interface RouteDependencies {
cluster: ILegacyCustomClusterClient;
cluster: ICustomClusterClient;
router: IRouter<RequestHandlerContextMonitoringPlugin>;
licenseService: MonitoringLicenseService;
encryptedSavedObjects?: EncryptedSavedObjectsPluginSetup;
@ -87,7 +86,7 @@ export interface MonitoringCore {
export interface LegacyShimDependencies {
router: IRouter<RequestHandlerContextMonitoringPlugin>;
instanceUuid: string;
esDataClient: ILegacyClusterClient;
esDataClient: ElasticsearchClient;
kibanaStatsCollector: any;
}