[Infra UI] Use new snapshot endpoint (#34938)

* Use new snapshot endpoint

* Remove old nodes endpoint

* Reintroduce NAME_FIELDS for displayable names.

* Use camelCase consistently.

* Distinguish node name and node id correctly.

* Adjust functional tests.

* Make prettier.

* Use exact same date histogram as before.

* Enable test for metric values again.

* Add test for new groupBy behaviour.

* Add 'Service Type' to groupBy fields.

* Fix date histogram offset and adjust tests.

* Always query for all metrics.
Sonja Krause-Harder 2019-04-30 10:32:55 +02:00 committed by GitHub
parent ee7a86a046
commit 87b0fd1158
66 changed files with 276 additions and 2465 deletions
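At a glance, the waffle map query stops sending a `path` array (which previously encoded both the node type and the group-by terms) and instead sends an explicit `type` plus a `groupBy` array. Below is a minimal sketch of the variable shapes before and after, with illustrative values; the interface shapes follow the generated `WaffleNodesQuery.Variables` types changed in the diffs further down, and the exact `timerange` shape is an assumption.

```ts
// Sketch only: shapes mirror the generated GraphQL types; values are illustrative.

interface OldVariables {
  sourceId: string;
  timerange: { interval: string; to: number; from: number }; // assumed shape
  filterQuery?: string | null;
  metric: { type: 'count' | 'cpu' | 'load' | 'memory' | 'tx' | 'rx' | 'logRate' };
  path: Array<{ type: 'terms' | 'hosts' | 'pods' | 'containers'; field?: string | null }>;
}

interface NewVariables {
  sourceId: string;
  timerange: { interval: string; to: number; from: number }; // assumed shape
  filterQuery?: string | null;
  metric: { type: 'count' | 'cpu' | 'load' | 'memory' | 'tx' | 'rx' | 'logRate' };
  groupBy: Array<{ field?: string | null; label?: string | null }>;
  type: 'host' | 'pod' | 'container';
}

// Before: the node type rode along as the last path element.
const before: OldVariables = {
  sourceId: 'default',
  timerange: { interval: '1m', from: 1556000000000, to: 1556003600000 },
  metric: { type: 'cpu' },
  path: [{ type: 'terms', field: 'cloud.provider' }, { type: 'hosts' }],
};

// After: node type and group-by are separate, explicit arguments.
const after: NewVariables = {
  sourceId: 'default',
  timerange: { interval: '1m', from: 1556000000000, to: 1556003600000 },
  metric: { type: 'cpu' },
  groupBy: [{ field: 'cloud.provider' }],
  type: 'host',
};

console.log(before, after);
```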

View file

@ -36,8 +36,6 @@ export interface InfraSource {
logSummaryBetween: InfraLogSummaryInterval;
logItem: InfraLogItem;
/** A hierarchy of hosts, pods, containers, services or arbitrary groups */
map?: InfraResponse | null;
/** A snapshot of nodes */
snapshot?: InfraSnapshotResponse | null;
@ -199,32 +197,6 @@ export interface InfraLogItemField {
value: string;
}
export interface InfraResponse {
nodes: InfraNode[];
}
export interface InfraNode {
path: InfraNodePath[];
metric: InfraNodeMetric;
}
export interface InfraNodePath {
value: string;
label: string;
}
export interface InfraNodeMetric {
name: InfraMetricType;
value: number;
avg: number;
max: number;
}
export interface InfraSnapshotResponse {
/** Nodes of type host, container or pod grouped by 0, 1 or 2 terms */
nodes: InfraSnapshotNode[];
@ -313,29 +285,6 @@ export interface InfraTimerangeInput {
from: number;
}
export interface InfraPathInput {
/** The type of path */
type: InfraPathType;
/** The label to use in the results for the group by for the terms group by */
label?: string | null;
/** The field to group by from a terms aggregation, this is ignored by the filter type */
field?: string | null;
/** The fitlers for the filter group by */
filters?: InfraPathFilterInput[] | null;
}
/** A group by filter */
export interface InfraPathFilterInput {
/** The label for the filter, this will be used as the group name in the final results */
label: string;
/** The query string query */
query: string;
}
export interface InfraMetricInput {
/** The type of metric */
type: InfraMetricType;
}
export interface InfraSnapshotGroupbyInput {
/** The label to use in the results for the group by for the terms group by */
label?: string | null;
@ -463,11 +412,6 @@ export interface LogSummaryBetweenInfraSourceArgs {
export interface LogItemInfraSourceArgs {
id: string;
}
export interface MapInfraSourceArgs {
timerange: InfraTimerangeInput;
filterQuery?: string | null;
}
export interface SnapshotInfraSourceArgs {
timerange: InfraTimerangeInput;
@ -485,11 +429,6 @@ export interface MetricsInfraSourceArgs {
export interface IndexFieldsInfraSourceStatusArgs {
indexType?: InfraIndexType | null;
}
export interface NodesInfraResponseArgs {
path: InfraPathInput[];
metric: InfraMetricInput;
}
export interface NodesInfraSnapshotResponseArgs {
type: InfraNodeType;
@ -530,25 +469,6 @@ export enum InfraNodeType {
host = 'host',
}
export enum InfraPathType {
terms = 'terms',
filters = 'filters',
hosts = 'hosts',
pods = 'pods',
containers = 'containers',
custom = 'custom',
}
export enum InfraMetricType {
count = 'count',
cpu = 'cpu',
load = 'load',
memory = 'memory',
tx = 'tx',
rx = 'rx',
logRate = 'logRate',
}
export enum InfraSnapshotMetricType {
count = 'count',
cpu = 'cpu',
@ -589,14 +509,6 @@ export enum InfraMetric {
nginxRequestsPerConnection = 'nginxRequestsPerConnection',
}
export enum InfraOperator {
gt = 'gt',
gte = 'gte',
lt = 'lt',
lte = 'lte',
eq = 'eq',
}
// ====================================================
// Unions
// ====================================================
@ -872,8 +784,9 @@ export namespace WaffleNodesQuery {
sourceId: string;
timerange: InfraTimerangeInput;
filterQuery?: string | null;
metric: InfraMetricInput;
path: InfraPathInput[];
metric: InfraSnapshotMetricInput;
groupBy: InfraSnapshotGroupbyInput[];
type: InfraNodeType;
};
export type Query = {
@ -887,17 +800,17 @@ export namespace WaffleNodesQuery {
id: string;
map?: Map | null;
snapshot?: Snapshot | null;
};
export type Map = {
__typename?: 'InfraResponse';
export type Snapshot = {
__typename?: 'InfraSnapshotResponse';
nodes: Nodes[];
};
export type Nodes = {
__typename?: 'InfraNode';
__typename?: 'InfraSnapshotNode';
path: Path[];
@ -905,7 +818,7 @@ export namespace WaffleNodesQuery {
};
export type Path = {
__typename?: 'InfraNodePath';
__typename?: 'InfraSnapshotNodePath';
value: string;
@ -913,15 +826,15 @@ export namespace WaffleNodesQuery {
};
export type Metric = {
__typename?: 'InfraNodeMetric';
__typename?: 'InfraSnapshotNodeMetric';
name: InfraMetricType;
name: InfraSnapshotMetricType;
value: number;
value?: number | null;
avg: number;
avg?: number | null;
max: number;
max?: number | null;
};
}
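Note that the generated `Metric` type above makes `value`, `avg`, and `max` nullable, so consumers of the snapshot response now need null-safe handling. A small sketch under that assumption, with the shape copied from the generated type and a hypothetical helper name:

```ts
// Snapshot node metric as generated above: the numeric fields may be null.
interface SnapshotNodeMetric {
  name: string;
  value?: number | null;
  avg?: number | null;
  max?: number | null;
}

// Hypothetical helper: fall back to zero when a metric value is missing,
// e.g. before computing bounds for the waffle map legend.
const metricValueOrZero = (metric: SnapshotNodeMetric): number =>
  metric.value != null ? metric.value : 0;

console.log(metricValueOrZero({ name: 'cpu', value: null, avg: 0.4, max: 0.9 })); // 0
```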

View file

@ -11,8 +11,8 @@ import React from 'react';
import euiStyled from '../../../../../common/eui_styled_components';
import {
InfraMetricType,
InfraNode,
InfraSnapshotMetricType,
InfraSnapshotNode,
InfraNodeType,
InfraTimerangeInput,
} from '../../graphql/types';
@ -28,7 +28,7 @@ import { TableView } from './table';
interface Props {
options: InfraWaffleMapOptions;
nodeType: InfraNodeType;
nodes: InfraNode[];
nodes: InfraSnapshotNode[];
loading: boolean;
reload: () => void;
onDrilldown: (filter: KueryFilterQuery) => void;
@ -51,24 +51,24 @@ interface MetricFormatters {
}
const METRIC_FORMATTERS: MetricFormatters = {
[InfraMetricType.count]: { formatter: InfraFormatterType.number, template: '{{value}}' },
[InfraMetricType.cpu]: {
[InfraSnapshotMetricType.count]: { formatter: InfraFormatterType.number, template: '{{value}}' },
[InfraSnapshotMetricType.cpu]: {
formatter: InfraFormatterType.percent,
template: '{{value}}',
},
[InfraMetricType.memory]: {
[InfraSnapshotMetricType.memory]: {
formatter: InfraFormatterType.percent,
template: '{{value}}',
},
[InfraMetricType.rx]: { formatter: InfraFormatterType.bits, template: '{{value}}/s' },
[InfraMetricType.tx]: { formatter: InfraFormatterType.bits, template: '{{value}}/s' },
[InfraMetricType.logRate]: {
[InfraSnapshotMetricType.rx]: { formatter: InfraFormatterType.bits, template: '{{value}}/s' },
[InfraSnapshotMetricType.tx]: { formatter: InfraFormatterType.bits, template: '{{value}}/s' },
[InfraSnapshotMetricType.logRate]: {
formatter: InfraFormatterType.abbreviatedNumber,
template: '{{value}}/s',
},
};
const calculateBoundsFromNodes = (nodes: InfraNode[]): InfraWaffleMapBounds => {
const calculateBoundsFromNodes = (nodes: InfraSnapshotNode[]): InfraWaffleMapBounds => {
const maxValues = nodes.map(node => node.metric.max);
const minValues = nodes.map(node => node.metric.value);
// if there is only one value then we need to set the bottom range to zero for min
@ -187,7 +187,7 @@ export const NodesOverview = injectI18n(
const metricFormatter = get(
METRIC_FORMATTERS,
metric.type,
METRIC_FORMATTERS[InfraMetricType.count]
METRIC_FORMATTERS[InfraSnapshotMetricType.count]
);
if (val == null) {
return '';
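The formatter map above pairs each snapshot metric type with a formatter and a Mustache-style template. A minimal sketch of how such a template might be applied to an already-formatted value; the simple string replacement here is a stand-in, not the plugin's actual formatter pipeline.

```ts
// Simplified stand-in for the METRIC_FORMATTERS templates shown above.
const templates: Record<string, string> = {
  cpu: '{{value}}',
  rx: '{{value}}/s',
  tx: '{{value}}/s',
  logRate: '{{value}}/s',
};

// Hypothetical rendering step: substitute the formatted value into the template.
const renderMetric = (type: string, formattedValue: string): string =>
  (templates[type] || '{{value}}').replace('{{value}}', formattedValue);

console.log(renderMetric('rx', '1.2 Mbit')); // "1.2 Mbit/s"
console.log(renderMetric('cpu', '75%'));     // "75%"
```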

View file

@ -8,15 +8,19 @@ import { EuiButtonEmpty, EuiInMemoryTable, EuiToolTip } from '@elastic/eui';
import { InjectedIntl, injectI18n } from '@kbn/i18n/react';
import { last } from 'lodash';
import React from 'react';
import { InfraNodeType } from '../../../server/lib/adapters/nodes';
import { createWaffleMapNode } from '../../containers/waffle/nodes_to_wafflemap';
import { InfraNode, InfraNodePath, InfraTimerangeInput } from '../../graphql/types';
import {
InfraSnapshotNode,
InfraSnapshotNodePath,
InfraTimerangeInput,
InfraNodeType,
} from '../../graphql/types';
import { InfraWaffleMapNode, InfraWaffleMapOptions } from '../../lib/lib';
import { fieldToName } from '../waffle/lib/field_to_display_name';
import { NodeContextMenu } from '../waffle/node_context_menu';
interface Props {
nodes: InfraNode[];
nodes: InfraSnapshotNode[];
nodeType: InfraNodeType;
options: InfraWaffleMapOptions;
formatter: (subject: string | number) => string;
@ -31,7 +35,7 @@ const initialState = {
type State = Readonly<typeof initialState>;
const getGroupPaths = (path: InfraNodePath[]) => {
const getGroupPaths = (path: InfraSnapshotNodePath[]) => {
switch (path.length) {
case 3:
return path.slice(0, 2);

View file

@ -7,7 +7,6 @@ import { EuiLink, EuiToolTip } from '@elastic/eui';
import React from 'react';
import euiStyled from '../../../../../common/eui_styled_components';
import { InfraPathType } from '../../graphql/types';
import { InfraWaffleMapGroup, InfraWaffleMapOptions } from '../../lib/lib';
interface Props {
@ -47,15 +46,7 @@ export class GroupName extends React.PureComponent<Props, {}> {
return;
}
const currentPath = this.props.isChild && groupBy.length > 1 ? groupBy[1] : groupBy[0];
if (currentPath.type === InfraPathType.terms && currentPath.field) {
this.props.onDrilldown(`${currentPath.field}: "${this.props.group.name}"`);
}
if (currentPath.type === InfraPathType.filters && currentPath.filters) {
const currentFilter = currentPath.filters.find(f => f.label === this.props.group.name);
if (currentFilter) {
this.props.onDrilldown(currentFilter.query);
}
}
this.props.onDrilldown(`${currentPath.field}: "${this.props.group.name}"`);
};
}
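With the filter-based group-by removed, the drilldown handler above always builds a simple field/value query from the current group-by field. A tiny sketch of the resulting filter string, assuming the group name comes straight from the terms bucket:

```ts
// Hypothetical reproduction of the drilldown string built above.
const buildDrilldownQuery = (field: string, groupName: string): string =>
  `${field}: "${groupName}"`;

console.log(buildDrilldownQuery('cloud.provider', 'gcp')); // cloud.provider: "gcp"
```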

View file

@ -40,6 +40,10 @@ export const fieldToName = (field: string, intl: InjectedIntl) => {
id: 'xpack.infra.groupByDisplayNames.provider',
defaultMessage: 'Cloud Provider',
}),
'service.type': intl.formatMessage({
id: 'xpack.infra.groupByDisplayNames.serviceType',
defaultMessage: 'Service Type',
}),
};
return LOOKUP[field] || field;
};
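The lookup above resolves known group-by fields to translated display names and falls back to the raw field name. A minimal sketch of that behaviour with the i18n layer omitted; the two entries shown are illustrative:

```ts
// Simplified version of the fieldToName lookup, without the i18n dependency.
const DISPLAY_NAMES: Record<string, string> = {
  'cloud.provider': 'Cloud Provider',
  'service.type': 'Service Type',
};

const fieldToDisplayName = (field: string): string => DISPLAY_NAMES[field] || field;

console.log(fieldToDisplayName('service.type'));    // "Service Type"
console.log(fieldToDisplayName('some.unknown.field')); // falls back to the field name
```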

View file

@ -11,7 +11,7 @@ import {
isWaffleMapGroupWithGroups,
isWaffleMapGroupWithNodes,
} from '../../containers/waffle/type_guards';
import { InfraNode, InfraNodeType, InfraTimerangeInput } from '../../graphql/types';
import { InfraSnapshotNode, InfraNodeType, InfraTimerangeInput } from '../../graphql/types';
import { InfraWaffleMapBounds, InfraWaffleMapOptions } from '../../lib/lib';
import { AutoSizer } from '../auto_sizer';
import { GroupOfGroups } from './group_of_groups';
@ -20,7 +20,7 @@ import { Legend } from './legend';
import { applyWaffleMapLayout } from './lib/apply_wafflemap_layout';
interface Props {
nodes: InfraNode[];
nodes: InfraSnapshotNode[];
nodeType: InfraNodeType;
options: InfraWaffleMapOptions;
formatter: (subject: string | number) => string;

View file

@ -11,8 +11,7 @@ import React from 'react';
import { InjectedIntl, injectI18n } from '@kbn/i18n/react';
import euiStyled from '../../../../../common/eui_styled_components';
import { InfraNodeType } from '../../../server/lib/adapters/nodes';
import { InfraTimerangeInput } from '../../graphql/types';
import { InfraTimerangeInput, InfraNodeType } from '../../graphql/types';
import { InfraWaffleMapBounds, InfraWaffleMapNode, InfraWaffleMapOptions } from '../../lib/lib';
import { colorFromValue } from './lib/color_from_value';
import { NodeContextMenu } from './node_context_menu';

View file

@ -15,15 +15,15 @@ import {
} from '@elastic/eui';
import { FormattedMessage, InjectedIntl, injectI18n } from '@kbn/i18n/react';
import React from 'react';
import { InfraIndexField, InfraNodeType, InfraPathInput, InfraPathType } from '../../graphql/types';
import { InfraIndexField, InfraNodeType, InfraSnapshotGroupbyInput } from '../../graphql/types';
import { InfraGroupByOptions } from '../../lib/lib';
import { CustomFieldPanel } from './custom_field_panel';
import { fieldToName } from './lib/field_to_display_name';
interface Props {
nodeType: InfraNodeType;
groupBy: InfraPathInput[];
onChange: (groupBy: InfraPathInput[]) => void;
groupBy: InfraSnapshotGroupbyInput[];
onChange: (groupBy: InfraSnapshotGroupbyInput[]) => void;
onChangeCustomOptions: (options: InfraGroupByOptions[]) => void;
fields: InfraIndexField[];
intl: InjectedIntl;
@ -32,7 +32,6 @@ interface Props {
const createFieldToOptionMapper = (intl: InjectedIntl) => (field: string) => ({
text: fieldToName(field, intl),
type: InfraPathType.terms,
field,
});
@ -40,23 +39,27 @@ let OPTIONS: { [P in InfraNodeType]: InfraGroupByOptions[] };
const getOptions = (
nodeType: InfraNodeType,
intl: InjectedIntl
): Array<{ text: string; type: InfraPathType; field: string }> => {
): Array<{ text: string; field: string }> => {
if (!OPTIONS) {
const mapFieldToOption = createFieldToOptionMapper(intl);
OPTIONS = {
[InfraNodeType.pod]: ['kubernetes.namespace', 'kubernetes.node.name'].map(mapFieldToOption),
[InfraNodeType.pod]: ['kubernetes.namespace', 'kubernetes.node.name', 'service.type'].map(
mapFieldToOption
),
[InfraNodeType.container]: [
'host.name',
'cloud.availability_zone',
'cloud.machine.type',
'cloud.project.id',
'cloud.provider',
'service.type',
].map(mapFieldToOption),
[InfraNodeType.host]: [
'cloud.availability_zone',
'cloud.machine.type',
'cloud.project.id',
'cloud.provider',
'service.type',
].map(mapFieldToOption),
};
}
@ -190,7 +193,6 @@ export const WaffleGroupByControls = injectI18n(
{
text: field,
field,
type: InfraPathType.custom,
},
];
this.props.onChangeCustomOptions(options);
@ -203,7 +205,7 @@ export const WaffleGroupByControls = injectI18n(
if (groupBy.some(g => g.field === field)) {
this.handleRemove(field)();
} else if (this.props.groupBy.length < 2) {
this.props.onChange([...groupBy, { type: InfraPathType.terms, field }]);
this.props.onChange([...groupBy, { field }]);
this.handleClose();
}
};
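After this change a group-by entry is just `{ field }`; the handler above removes an entry whose field is already selected and otherwise appends one, capped at two selections. A small stand-alone sketch of that toggle logic; the removal branch is an assumption, since `handleRemove` is not shown in this excerpt.

```ts
interface GroupByEntry {
  field?: string | null;
  label?: string | null;
}

// Hypothetical stand-alone version of the toggle behaviour in handleClick above.
const toggleGroupBy = (groupBy: GroupByEntry[], field: string): GroupByEntry[] => {
  if (groupBy.some(g => g.field === field)) {
    return groupBy.filter(g => g.field !== field); // assumed removal behaviour
  }
  return groupBy.length < 2 ? [...groupBy, { field }] : groupBy;
};

let selection: GroupByEntry[] = [];
selection = toggleGroupBy(selection, 'cloud.provider'); // [{ field: 'cloud.provider' }]
selection = toggleGroupBy(selection, 'service.type');   // two entries, the maximum
selection = toggleGroupBy(selection, 'cloud.provider'); // removes the first entry again
console.log(selection);
```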

View file

@ -14,20 +14,24 @@ import {
import { FormattedMessage, InjectedIntl, injectI18n } from '@kbn/i18n/react';
import React from 'react';
import { InfraMetricInput, InfraMetricType, InfraNodeType } from '../../graphql/types';
import {
InfraSnapshotMetricInput,
InfraSnapshotMetricType,
InfraNodeType,
} from '../../graphql/types';
interface Props {
nodeType: InfraNodeType;
metric: InfraMetricInput;
onChange: (metric: InfraMetricInput) => void;
metric: InfraSnapshotMetricInput;
onChange: (metric: InfraSnapshotMetricInput) => void;
intl: InjectedIntl;
}
let OPTIONS: { [P in InfraNodeType]: Array<{ text: string; value: InfraMetricType }> };
let OPTIONS: { [P in InfraNodeType]: Array<{ text: string; value: InfraSnapshotMetricType }> };
const getOptions = (
nodeType: InfraNodeType,
intl: InjectedIntl
): Array<{ text: string; value: InfraMetricType }> => {
): Array<{ text: string; value: InfraSnapshotMetricType }> => {
if (!OPTIONS) {
const CPUUsage = intl.formatMessage({
id: 'xpack.infra.waffle.metricOptions.cpuUsageText',
@ -53,69 +57,69 @@ const getOptions = (
[InfraNodeType.pod]: [
{
text: CPUUsage,
value: InfraMetricType.cpu,
value: InfraSnapshotMetricType.cpu,
},
{
text: MemoryUsage,
value: InfraMetricType.memory,
value: InfraSnapshotMetricType.memory,
},
{
text: InboundTraffic,
value: InfraMetricType.rx,
value: InfraSnapshotMetricType.rx,
},
{
text: OutboundTraffic,
value: InfraMetricType.tx,
value: InfraSnapshotMetricType.tx,
},
],
[InfraNodeType.container]: [
{
text: CPUUsage,
value: InfraMetricType.cpu,
value: InfraSnapshotMetricType.cpu,
},
{
text: MemoryUsage,
value: InfraMetricType.memory,
value: InfraSnapshotMetricType.memory,
},
{
text: InboundTraffic,
value: InfraMetricType.rx,
value: InfraSnapshotMetricType.rx,
},
{
text: OutboundTraffic,
value: InfraMetricType.tx,
value: InfraSnapshotMetricType.tx,
},
],
[InfraNodeType.host]: [
{
text: CPUUsage,
value: InfraMetricType.cpu,
value: InfraSnapshotMetricType.cpu,
},
{
text: MemoryUsage,
value: InfraMetricType.memory,
value: InfraSnapshotMetricType.memory,
},
{
text: intl.formatMessage({
id: 'xpack.infra.waffle.metricOptions.loadText',
defaultMessage: 'Load',
}),
value: InfraMetricType.load,
value: InfraSnapshotMetricType.load,
},
{
text: InboundTraffic,
value: InfraMetricType.rx,
value: InfraSnapshotMetricType.rx,
},
{
text: OutboundTraffic,
value: InfraMetricType.tx,
value: InfraSnapshotMetricType.tx,
},
{
text: intl.formatMessage({
id: 'xpack.infra.waffle.metricOptions.hostLogRateText',
defaultMessage: 'Log Rate',
}),
value: InfraMetricType.logRate,
value: InfraSnapshotMetricType.logRate,
},
],
};
@ -193,7 +197,7 @@ export const WaffleMetricControls = injectI18n(
this.setState(state => ({ isPopoverOpen: !state.isPopoverOpen }));
};
private handleClick = (value: InfraMetricType) => () => {
private handleClick = (value: InfraSnapshotMetricType) => () => {
this.props.onChange({ type: value });
this.handleClose();
};

View file

@ -8,18 +8,18 @@ import { EuiButtonGroup } from '@elastic/eui';
import { InjectedIntl, injectI18n } from '@kbn/i18n/react';
import React from 'react';
import {
InfraMetricInput,
InfraMetricType,
InfraSnapshotMetricInput,
InfraSnapshotMetricType,
InfraNodeType,
InfraPathInput,
InfraSnapshotGroupbyInput,
} from '../../graphql/types';
interface Props {
intl: InjectedIntl;
nodeType: InfraNodeType;
changeNodeType: (nodeType: InfraNodeType) => void;
changeGroupBy: (groupBy: InfraPathInput[]) => void;
changeMetric: (metric: InfraMetricInput) => void;
changeGroupBy: (groupBy: InfraSnapshotGroupbyInput[]) => void;
changeMetric: (metric: InfraSnapshotMetricInput) => void;
}
export class WaffleNodeTypeSwitcherClass extends React.PureComponent<Props> {
@ -59,7 +59,7 @@ export class WaffleNodeTypeSwitcherClass extends React.PureComponent<Props> {
private handleClick = (nodeType: string) => {
this.props.changeNodeType(nodeType as InfraNodeType);
this.props.changeGroupBy([]);
this.props.changeMetric({ type: InfraMetricType.cpu });
this.props.changeMetric({ type: InfraSnapshotMetricType.cpu });
};
}

View file

@ -7,7 +7,7 @@
import { i18n } from '@kbn/i18n';
import { first, last } from 'lodash';
import { InfraNode, InfraNodePath } from '../../graphql/types';
import { InfraSnapshotNode, InfraSnapshotNodePath } from '../../graphql/types';
import {
InfraWaffleMapGroup,
InfraWaffleMapGroupOfGroups,
@ -16,13 +16,13 @@ import {
} from '../../lib/lib';
import { isWaffleMapGroupWithGroups, isWaffleMapGroupWithNodes } from './type_guards';
export function createId(path: InfraNodePath[]) {
export function createId(path: InfraSnapshotNodePath[]) {
return path.map(p => p.value).join('/');
}
function findOrCreateGroupWithNodes(
groups: InfraWaffleMapGroup[],
path: InfraNodePath[]
path: InfraSnapshotNodePath[]
): InfraWaffleMapGroupOfNodes {
const id = path.length === 0 ? '__all__' : createId(path);
/**
@ -62,7 +62,7 @@ function findOrCreateGroupWithNodes(
function findOrCreateGroupWithGroups(
groups: InfraWaffleMapGroup[],
path: InfraNodePath[]
path: InfraSnapshotNodePath[]
): InfraWaffleMapGroupOfGroups {
const id = path.length === 0 ? '__all__' : createId(path);
const lastPath = last(path);
@ -85,7 +85,7 @@ function findOrCreateGroupWithGroups(
};
}
export function createWaffleMapNode(node: InfraNode): InfraWaffleMapNode {
export function createWaffleMapNode(node: InfraSnapshotNode): InfraWaffleMapNode {
const nodePathItem = last(node.path);
if (!nodePathItem) {
throw new Error('There must be at least one node path item');
@ -105,8 +105,8 @@ function withoutGroup(group: InfraWaffleMapGroup) {
};
}
export function nodesToWaffleMap(nodes: InfraNode[]): InfraWaffleMapGroup[] {
return nodes.reduce((groups: InfraWaffleMapGroup[], node: InfraNode) => {
export function nodesToWaffleMap(nodes: InfraSnapshotNode[]): InfraWaffleMapGroup[] {
return nodes.reduce((groups: InfraWaffleMapGroup[], node: InfraSnapshotNode) => {
const waffleNode = createWaffleMapNode(node);
if (node.path.length === 2) {
const parentGroup = findOrCreateGroupWithNodes(
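The path helpers above now operate on `InfraSnapshotNodePath` but keep the same logic: a node's id is its path values joined with `/`. A tiny sketch of that joining rule with a local copy of the path type:

```ts
interface SnapshotNodePath {
  value: string;
  label: string;
}

// Same joining rule as createId above, on the renamed path type.
const createId = (path: SnapshotNodePath[]): string => path.map(p => p.value).join('/');

console.log(
  createId([
    { value: 'gcp', label: 'gcp' },
    { value: 'host-01', label: 'host-01' },
  ])
); // "gcp/host-01"
```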

View file

@ -11,13 +11,14 @@ export const waffleNodesQuery = gql`
$sourceId: ID!
$timerange: InfraTimerangeInput!
$filterQuery: String
$metric: InfraMetricInput!
$path: [InfraPathInput!]!
$metric: InfraSnapshotMetricInput!
$groupBy: [InfraSnapshotGroupbyInput!]!
$type: InfraNodeType!
) {
source(id: $sourceId) {
id
map(timerange: $timerange, filterQuery: $filterQuery) {
nodes(path: $path, metric: $metric) {
snapshot(timerange: $timerange, filterQuery: $filterQuery) {
nodes(groupBy: $groupBy, metric: $metric, type: $type) {
path {
value
label
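The hunk above is truncated mid-query. Below is a sketch of what the complete query document plausibly looks like after this change, reconstructed from the generated `WaffleNodesQuery` types elsewhere in this commit rather than copied from the source file; the `metric` selection set in particular is inferred from the generated `Metric` type (`name`, `value`, `avg`, `max`).

```ts
import gql from 'graphql-tag';

// Reconstructed sketch of the new snapshot query, not the literal file contents.
export const waffleSnapshotQuerySketch = gql`
  query WaffleNodesQuery(
    $sourceId: ID!
    $timerange: InfraTimerangeInput!
    $filterQuery: String
    $metric: InfraSnapshotMetricInput!
    $groupBy: [InfraSnapshotGroupbyInput!]!
    $type: InfraNodeType!
  ) {
    source(id: $sourceId) {
      id
      snapshot(timerange: $timerange, filterQuery: $filterQuery) {
        nodes(groupBy: $groupBy, metric: $metric, type: $type) {
          path {
            value
            label
          }
          metric {
            name
            value
            avg
            max
          }
        }
      }
    }
  }
`;
```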

View file

@ -8,18 +8,17 @@ import React from 'react';
import { Query } from 'react-apollo';
import {
InfraMetricInput,
InfraNode,
InfraSnapshotMetricInput,
InfraSnapshotNode,
InfraNodeType,
InfraPathInput,
InfraPathType,
InfraSnapshotGroupbyInput,
InfraTimerangeInput,
WaffleNodesQuery,
} from '../../graphql/types';
import { waffleNodesQuery } from './waffle_nodes.gql_query';
interface WithWaffleNodesArgs {
nodes: InfraNode[];
nodes: InfraSnapshotNode[];
loading: boolean;
refetch: () => void;
}
@ -27,19 +26,13 @@ interface WithWaffleNodesArgs {
interface WithWaffleNodesProps {
children: (args: WithWaffleNodesArgs) => React.ReactNode;
filterQuery: string | null | undefined;
metric: InfraMetricInput;
groupBy: InfraPathInput[];
metric: InfraSnapshotMetricInput;
groupBy: InfraSnapshotGroupbyInput[];
nodeType: InfraNodeType;
sourceId: string;
timerange: InfraTimerangeInput;
}
const NODE_TYPE_TO_PATH_TYPE = {
[InfraNodeType.container]: InfraPathType.containers,
[InfraNodeType.host]: InfraPathType.hosts,
[InfraNodeType.pod]: InfraPathType.pods,
};
export const WithWaffleNodes = ({
children,
filterQuery,
@ -56,7 +49,8 @@ export const WithWaffleNodes = ({
variables={{
sourceId,
metric,
path: [...groupBy, { type: NODE_TYPE_TO_PATH_TYPE[nodeType] }],
groupBy: [...groupBy],
type: nodeType,
timerange,
filterQuery,
}}
@ -65,8 +59,8 @@ export const WithWaffleNodes = ({
children({
loading,
nodes:
data && data.source && data.source.map && data.source.map.nodes
? data.source.map.nodes
data && data.source && data.source.snapshot && data.source.snapshot.nodes
? data.source.snapshot.nodes
: [],
refetch,
})

View file

@ -10,10 +10,10 @@ import { createSelector } from 'reselect';
import { isBoolean, isNumber } from 'lodash';
import {
InfraMetricInput,
InfraMetricType,
InfraSnapshotMetricInput,
InfraSnapshotMetricType,
InfraNodeType,
InfraPathType,
InfraSnapshotGroupbyInput,
} from '../../graphql/types';
import { InfraGroupByOptions } from '../../lib/lib';
import { State, waffleOptionsActions, waffleOptionsSelectors } from '../../store';
@ -158,29 +158,26 @@ const mapToUrlState = (value: any): WaffleOptionsUrlState | undefined =>
}
: undefined;
const isInfraMetricInput = (subject: any): subject is InfraMetricInput => {
return subject != null && subject.type != null && InfraMetricType[subject.type] != null;
const isInfraSnapshotMetricInput = (subject: any): subject is InfraSnapshotMetricInput => {
return subject != null && subject.type != null && InfraSnapshotMetricType[subject.type] != null;
};
const isInfraPathInput = (subject: any): subject is InfraPathType => {
return subject != null && subject.type != null && InfraPathType[subject.type] != null;
const isInfraSnapshotGroupbyInput = (subject: any): subject is InfraSnapshotGroupbyInput => {
return subject != null && subject.type != null;
};
const isInfraGroupByOption = (subject: any): subject is InfraGroupByOptions => {
return (
subject != null &&
subject.text != null &&
subject.field != null &&
InfraPathType[subject.type] != null
);
return subject != null && subject.text != null && subject.field != null;
};
const mapToMetricUrlState = (subject: any) => {
return subject && isInfraMetricInput(subject) ? subject : undefined;
return subject && isInfraSnapshotMetricInput(subject) ? subject : undefined;
};
const mapToGroupByUrlState = (subject: any) => {
return subject && Array.isArray(subject) && subject.every(isInfraPathInput) ? subject : undefined;
return subject && Array.isArray(subject) && subject.every(isInfraSnapshotGroupbyInput)
? subject
: undefined;
};
const mapToNodeTypeUrlState = (subject: any) => {
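The URL-state guards above get simpler: a metric is accepted when its `type` is one of the snapshot metric types, and a group-by array no longer needs to carry a recognised path type. A minimal sketch of how the metric guard behaves, with the enum values copied from `InfraSnapshotMetricType` and the generated enum import replaced by a plain list:

```ts
// Values as declared in InfraSnapshotMetricType.
const SNAPSHOT_METRIC_TYPES = ['count', 'cpu', 'load', 'memory', 'tx', 'rx', 'logRate'] as const;

interface SnapshotMetricInput {
  type: string;
}

// Equivalent of isInfraSnapshotMetricInput above, without the generated enum.
const isSnapshotMetricInput = (subject: any): subject is SnapshotMetricInput =>
  subject != null &&
  subject.type != null &&
  (SNAPSHOT_METRIC_TYPES as readonly string[]).includes(subject.type);

console.log(isSnapshotMetricInput({ type: 'cpu' }));    // true
console.log(isSnapshotMetricInput({ type: 'uptime' })); // false: not a snapshot metric
```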

View file

@ -7,13 +7,8 @@
import moment from 'moment';
import React from 'react';
import { InfraMetricType, InfraPathType } from '../graphql/types';
import {
InfraFormatterType,
InfraOptions,
InfraWaffleMapLegendMode,
// InfraWaffleMapRuleOperator,
} from '../lib/lib';
import { InfraSnapshotMetricType } from '../graphql/types';
import { InfraFormatterType, InfraOptions, InfraWaffleMapLegendMode } from '../lib/lib';
import { RendererFunction } from '../utils/typed_react';
const initialState = {
@ -29,27 +24,8 @@ const initialState = {
wafflemap: {
formatter: InfraFormatterType.percent,
formatTemplate: '{{value}}',
metric: { type: InfraMetricType.cpu },
path: [{ type: InfraPathType.hosts }],
/*
legend: {
type: InfraWaffleMapLegendMode.step,
rules: [
{
value: 0,
color: '#00B3A4',
operator: InfraWaffleMapRuleOperator.gte,
label: 'Ok',
},
{
value: 10000,
color: '#DB1374',
operator: InfraWaffleMapRuleOperator.gte,
label: 'Over 10,000',
},
],
},
*/
metric: { type: InfraSnapshotMetricType.cpu },
groupBy: [],
legend: {
type: InfraWaffleMapLegendMode.gradient,
rules: [

View file

@ -322,35 +322,6 @@
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "map",
"description": "A hierarchy of hosts, pods, containers, services or arbitrary groups",
"args": [
{
"name": "timerange",
"description": "",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "INPUT_OBJECT",
"name": "InfraTimerangeInput",
"ofType": null
}
},
"defaultValue": null
},
{
"name": "filterQuery",
"description": "",
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
"defaultValue": null
}
],
"type": { "kind": "OBJECT", "name": "InfraResponse", "ofType": null },
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "snapshot",
"description": "A snapshot of nodes",
@ -1513,347 +1484,6 @@
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "InfraResponse",
"description": "",
"fields": [
{
"name": "nodes",
"description": "",
"args": [
{
"name": "path",
"description": "",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": { "kind": "INPUT_OBJECT", "name": "InfraPathInput", "ofType": null }
}
}
},
"defaultValue": null
},
{
"name": "metric",
"description": "",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": { "kind": "INPUT_OBJECT", "name": "InfraMetricInput", "ofType": null }
},
"defaultValue": null
}
],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": { "kind": "OBJECT", "name": "InfraNode", "ofType": null }
}
}
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "INPUT_OBJECT",
"name": "InfraPathInput",
"description": "",
"fields": null,
"inputFields": [
{
"name": "type",
"description": "The type of path",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": { "kind": "ENUM", "name": "InfraPathType", "ofType": null }
},
"defaultValue": null
},
{
"name": "label",
"description": "The label to use in the results for the group by for the terms group by",
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
"defaultValue": null
},
{
"name": "field",
"description": "The field to group by from a terms aggregation, this is ignored by the filter type",
"type": { "kind": "SCALAR", "name": "String", "ofType": null },
"defaultValue": null
},
{
"name": "filters",
"description": "The fitlers for the filter group by",
"type": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": { "kind": "INPUT_OBJECT", "name": "InfraPathFilterInput", "ofType": null }
}
},
"defaultValue": null
}
],
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "ENUM",
"name": "InfraPathType",
"description": "",
"fields": null,
"inputFields": null,
"interfaces": null,
"enumValues": [
{ "name": "terms", "description": "", "isDeprecated": false, "deprecationReason": null },
{
"name": "filters",
"description": "",
"isDeprecated": false,
"deprecationReason": null
},
{ "name": "hosts", "description": "", "isDeprecated": false, "deprecationReason": null },
{ "name": "pods", "description": "", "isDeprecated": false, "deprecationReason": null },
{
"name": "containers",
"description": "",
"isDeprecated": false,
"deprecationReason": null
},
{ "name": "custom", "description": "", "isDeprecated": false, "deprecationReason": null }
],
"possibleTypes": null
},
{
"kind": "INPUT_OBJECT",
"name": "InfraPathFilterInput",
"description": "A group by filter",
"fields": null,
"inputFields": [
{
"name": "label",
"description": "The label for the filter, this will be used as the group name in the final results",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": { "kind": "SCALAR", "name": "String", "ofType": null }
},
"defaultValue": null
},
{
"name": "query",
"description": "The query string query",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": { "kind": "SCALAR", "name": "String", "ofType": null }
},
"defaultValue": null
}
],
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "INPUT_OBJECT",
"name": "InfraMetricInput",
"description": "",
"fields": null,
"inputFields": [
{
"name": "type",
"description": "The type of metric",
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": { "kind": "ENUM", "name": "InfraMetricType", "ofType": null }
},
"defaultValue": null
}
],
"interfaces": null,
"enumValues": null,
"possibleTypes": null
},
{
"kind": "ENUM",
"name": "InfraMetricType",
"description": "",
"fields": null,
"inputFields": null,
"interfaces": null,
"enumValues": [
{ "name": "count", "description": "", "isDeprecated": false, "deprecationReason": null },
{ "name": "cpu", "description": "", "isDeprecated": false, "deprecationReason": null },
{ "name": "load", "description": "", "isDeprecated": false, "deprecationReason": null },
{ "name": "memory", "description": "", "isDeprecated": false, "deprecationReason": null },
{ "name": "tx", "description": "", "isDeprecated": false, "deprecationReason": null },
{ "name": "rx", "description": "", "isDeprecated": false, "deprecationReason": null },
{ "name": "logRate", "description": "", "isDeprecated": false, "deprecationReason": null }
],
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "InfraNode",
"description": "",
"fields": [
{
"name": "path",
"description": "",
"args": [],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": {
"kind": "LIST",
"name": null,
"ofType": {
"kind": "NON_NULL",
"name": null,
"ofType": { "kind": "OBJECT", "name": "InfraNodePath", "ofType": null }
}
}
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "metric",
"description": "",
"args": [],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": { "kind": "OBJECT", "name": "InfraNodeMetric", "ofType": null }
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "InfraNodePath",
"description": "",
"fields": [
{
"name": "value",
"description": "",
"args": [],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": { "kind": "SCALAR", "name": "String", "ofType": null }
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "label",
"description": "",
"args": [],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": { "kind": "SCALAR", "name": "String", "ofType": null }
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "InfraNodeMetric",
"description": "",
"fields": [
{
"name": "name",
"description": "",
"args": [],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": { "kind": "ENUM", "name": "InfraMetricType", "ofType": null }
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "value",
"description": "",
"args": [],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": { "kind": "SCALAR", "name": "Float", "ofType": null }
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "avg",
"description": "",
"args": [],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": { "kind": "SCALAR", "name": "Float", "ofType": null }
},
"isDeprecated": false,
"deprecationReason": null
},
{
"name": "max",
"description": "",
"args": [],
"type": {
"kind": "NON_NULL",
"name": null,
"ofType": { "kind": "SCALAR", "name": "Float", "ofType": null }
},
"isDeprecated": false,
"deprecationReason": null
}
],
"inputFields": null,
"interfaces": [],
"enumValues": null,
"possibleTypes": null
},
{
"kind": "OBJECT",
"name": "InfraSnapshotResponse",
@ -3506,22 +3136,6 @@
}
],
"possibleTypes": null
},
{
"kind": "ENUM",
"name": "InfraOperator",
"description": "",
"fields": null,
"inputFields": null,
"interfaces": null,
"enumValues": [
{ "name": "gt", "description": "", "isDeprecated": false, "deprecationReason": null },
{ "name": "gte", "description": "", "isDeprecated": false, "deprecationReason": null },
{ "name": "lt", "description": "", "isDeprecated": false, "deprecationReason": null },
{ "name": "lte", "description": "", "isDeprecated": false, "deprecationReason": null },
{ "name": "eq", "description": "", "isDeprecated": false, "deprecationReason": null }
],
"possibleTypes": null
}
],
"directives": [

View file

@ -36,8 +36,6 @@ export interface InfraSource {
logSummaryBetween: InfraLogSummaryInterval;
logItem: InfraLogItem;
/** A hierarchy of hosts, pods, containers, services or arbitrary groups */
map?: InfraResponse | null;
/** A snapshot of nodes */
snapshot?: InfraSnapshotResponse | null;
@ -199,32 +197,6 @@ export interface InfraLogItemField {
value: string;
}
export interface InfraResponse {
nodes: InfraNode[];
}
export interface InfraNode {
path: InfraNodePath[];
metric: InfraNodeMetric;
}
export interface InfraNodePath {
value: string;
label: string;
}
export interface InfraNodeMetric {
name: InfraMetricType;
value: number;
avg: number;
max: number;
}
export interface InfraSnapshotResponse {
/** Nodes of type host, container or pod grouped by 0, 1 or 2 terms */
nodes: InfraSnapshotNode[];
@ -313,29 +285,6 @@ export interface InfraTimerangeInput {
from: number;
}
export interface InfraPathInput {
/** The type of path */
type: InfraPathType;
/** The label to use in the results for the group by for the terms group by */
label?: string | null;
/** The field to group by from a terms aggregation, this is ignored by the filter type */
field?: string | null;
/** The fitlers for the filter group by */
filters?: InfraPathFilterInput[] | null;
}
/** A group by filter */
export interface InfraPathFilterInput {
/** The label for the filter, this will be used as the group name in the final results */
label: string;
/** The query string query */
query: string;
}
export interface InfraMetricInput {
/** The type of metric */
type: InfraMetricType;
}
export interface InfraSnapshotGroupbyInput {
/** The label to use in the results for the group by for the terms group by */
label?: string | null;
@ -463,11 +412,6 @@ export interface LogSummaryBetweenInfraSourceArgs {
export interface LogItemInfraSourceArgs {
id: string;
}
export interface MapInfraSourceArgs {
timerange: InfraTimerangeInput;
filterQuery?: string | null;
}
export interface SnapshotInfraSourceArgs {
timerange: InfraTimerangeInput;
@ -485,11 +429,6 @@ export interface MetricsInfraSourceArgs {
export interface IndexFieldsInfraSourceStatusArgs {
indexType?: InfraIndexType | null;
}
export interface NodesInfraResponseArgs {
path: InfraPathInput[];
metric: InfraMetricInput;
}
export interface NodesInfraSnapshotResponseArgs {
type: InfraNodeType;
@ -530,25 +469,6 @@ export enum InfraNodeType {
host = 'host',
}
export enum InfraPathType {
terms = 'terms',
filters = 'filters',
hosts = 'hosts',
pods = 'pods',
containers = 'containers',
custom = 'custom',
}
export enum InfraMetricType {
count = 'count',
cpu = 'cpu',
load = 'load',
memory = 'memory',
tx = 'tx',
rx = 'rx',
logRate = 'logRate',
}
export enum InfraSnapshotMetricType {
count = 'count',
cpu = 'cpu',
@ -589,14 +509,6 @@ export enum InfraMetric {
nginxRequestsPerConnection = 'nginxRequestsPerConnection',
}
export enum InfraOperator {
gt = 'gt',
gte = 'gte',
lt = 'lt',
lte = 'lte',
eq = 'eq',
}
// ====================================================
// Unions
// ====================================================
@ -872,8 +784,9 @@ export namespace WaffleNodesQuery {
sourceId: string;
timerange: InfraTimerangeInput;
filterQuery?: string | null;
metric: InfraMetricInput;
path: InfraPathInput[];
metric: InfraSnapshotMetricInput;
groupBy: InfraSnapshotGroupbyInput[];
type: InfraNodeType;
};
export type Query = {
@ -887,17 +800,17 @@ export namespace WaffleNodesQuery {
id: string;
map?: Map | null;
snapshot?: Snapshot | null;
};
export type Map = {
__typename?: 'InfraResponse';
export type Snapshot = {
__typename?: 'InfraSnapshotResponse';
nodes: Nodes[];
};
export type Nodes = {
__typename?: 'InfraNode';
__typename?: 'InfraSnapshotNode';
path: Path[];
@ -905,7 +818,7 @@ export namespace WaffleNodesQuery {
};
export type Path = {
__typename?: 'InfraNodePath';
__typename?: 'InfraSnapshotNodePath';
value: string;
@ -913,15 +826,15 @@ export namespace WaffleNodesQuery {
};
export type Metric = {
__typename?: 'InfraNodeMetric';
__typename?: 'InfraSnapshotNodeMetric';
name: InfraMetricType;
name: InfraSnapshotMetricType;
value: number;
value?: number | null;
avg: number;
avg?: number | null;
max: number;
max?: number | null;
};
}

View file

@ -11,11 +11,10 @@ import { AxiosRequestConfig } from 'axios';
import React from 'react';
import { Observable } from 'rxjs';
import {
InfraMetricInput,
InfraNodeMetric,
InfraNodePath,
InfraPathInput,
InfraPathType,
InfraSnapshotMetricInput,
InfraSnapshotNodeMetric,
InfraSnapshotNodePath,
InfraSnapshotGroupbyInput,
InfraTimerangeInput,
SourceQuery,
} from '../graphql/types';
@ -103,8 +102,8 @@ export interface InfraWaffleMapNode {
pathId: string;
id: string;
name: string;
path: InfraNodePath[];
metric: InfraNodeMetric;
path: InfraSnapshotNodePath[];
metric: InfraSnapshotNodeMetric;
}
export type InfraWaffleMapGroup = InfraWaffleMapGroupOfNodes | InfraWaffleMapGroupOfGroups;
@ -166,9 +165,8 @@ export interface InfraWaffleMapOptions {
fields?: SourceQuery.Query['source']['configuration']['fields'] | null;
formatter: InfraFormatterType;
formatTemplate: string;
metric: InfraMetricInput;
path: InfraPathInput[];
groupBy: InfraPathInput[];
metric: InfraSnapshotMetricInput;
groupBy: InfraSnapshotGroupbyInput[];
legend: InfraWaffleMapLegend;
}
@ -205,6 +203,5 @@ export enum InfraWaffleMapDataFormat {
export interface InfraGroupByOptions {
text: string;
type: InfraPathType;
field: string;
}

View file

@ -5,13 +5,17 @@
*/
import actionCreatorFactory from 'typescript-fsa';
import { InfraMetricInput, InfraNodeType, InfraPathInput } from '../../../graphql/types';
import {
InfraSnapshotMetricInput,
InfraNodeType,
InfraSnapshotGroupbyInput,
} from '../../../graphql/types';
import { InfraGroupByOptions, InfraWaffleMapBounds } from '../../../lib/lib';
const actionCreator = actionCreatorFactory('x-pack/infra/local/waffle_options');
export const changeMetric = actionCreator<InfraMetricInput>('CHANGE_METRIC');
export const changeGroupBy = actionCreator<InfraPathInput[]>('CHANGE_GROUP_BY');
export const changeMetric = actionCreator<InfraSnapshotMetricInput>('CHANGE_METRIC');
export const changeGroupBy = actionCreator<InfraSnapshotGroupbyInput[]>('CHANGE_GROUP_BY');
export const changeCustomOptions = actionCreator<InfraGroupByOptions[]>('CHANGE_CUSTOM_OPTIONS');
export const changeNodeType = actionCreator<InfraNodeType>('CHANGE_NODE_TYPE');
export const changeView = actionCreator<string>('CHANGE_VIEW');

View file

@ -8,10 +8,10 @@ import { combineReducers } from 'redux';
import { reducerWithInitialState } from 'typescript-fsa-reducers';
import {
InfraMetricInput,
InfraMetricType,
InfraSnapshotMetricInput,
InfraSnapshotMetricType,
InfraNodeType,
InfraPathInput,
InfraSnapshotGroupbyInput,
} from '../../../graphql/types';
import { InfraGroupByOptions, InfraWaffleMapBounds } from '../../../lib/lib';
import {
@ -25,8 +25,8 @@ import {
} from './actions';
export interface WaffleOptionsState {
metric: InfraMetricInput;
groupBy: InfraPathInput[];
metric: InfraSnapshotMetricInput;
groupBy: InfraSnapshotGroupbyInput[];
nodeType: InfraNodeType;
view: string;
customOptions: InfraGroupByOptions[];
@ -35,7 +35,7 @@ export interface WaffleOptionsState {
}
export const initialWaffleOptionsState: WaffleOptionsState = {
metric: { type: InfraMetricType.cpu },
metric: { type: InfraSnapshotMetricType.cpu },
groupBy: [],
nodeType: InfraNodeType.host,
view: 'map',

View file

@ -9,7 +9,6 @@ import { sharedSchema } from '../../common/graphql/shared/schema.gql';
import { logEntriesSchema } from './log_entries/schema.gql';
import { metadataSchema } from './metadata/schema.gql';
import { metricsSchema } from './metrics/schema.gql';
import { nodesSchema } from './nodes/schema.gql';
import { snapshotSchema } from './snapshot/schema.gql';
import { sourceStatusSchema } from './source_status/schema.gql';
import { sourcesSchema } from './sources/schema.gql';
@ -19,7 +18,6 @@ export const schemas = [
sharedSchema,
metadataSchema,
logEntriesSchema,
nodesSchema,
snapshotSchema,
sourcesSchema,
sourceStatusSchema,

View file

@ -1,8 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export { createNodeResolvers } from './resolvers';
export { nodesSchema } from './schema.gql';

View file

@ -1,72 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraResponseResolvers, InfraSourceResolvers } from '../../graphql/types';
import { InfraNodeRequestOptions } from '../../lib/adapters/nodes';
import { extractGroupByAndNodeFromPath } from '../../lib/adapters/nodes/extract_group_by_and_node_from_path';
import { InfraNodesDomain } from '../../lib/domains/nodes_domain';
import { UsageCollector } from '../../usage/usage_collector';
import { parseFilterQuery } from '../../utils/serialized_query';
import { ChildResolverOf, InfraResolverOf, ResultOf } from '../../utils/typed_resolvers';
import { QuerySourceResolver } from '../sources/resolvers';
type InfraSourceMapResolver = ChildResolverOf<
InfraResolverOf<
InfraSourceResolvers.MapResolver<
{
source: ResultOf<QuerySourceResolver>;
} & InfraSourceResolvers.MapArgs
>
>,
QuerySourceResolver
>;
type InfraNodesResolver = ChildResolverOf<
InfraResolverOf<InfraResponseResolvers.NodesResolver>,
InfraSourceMapResolver
>;
interface NodesResolversDeps {
nodes: InfraNodesDomain;
}
export const createNodeResolvers = (
libs: NodesResolversDeps
): {
InfraSource: {
map: InfraSourceMapResolver;
};
InfraResponse: {
nodes: InfraNodesResolver;
};
} => ({
InfraSource: {
async map(source, args) {
return {
source,
timerange: args.timerange,
filterQuery: args.filterQuery,
};
},
},
InfraResponse: {
async nodes(mapResponse, args, { req }) {
const { source, timerange, filterQuery } = mapResponse;
const { groupBy, nodeType } = extractGroupByAndNodeFromPath(args.path);
UsageCollector.countNode(nodeType);
const options: InfraNodeRequestOptions = {
filterQuery: parseFilterQuery(filterQuery),
nodeType,
groupBy,
sourceConfiguration: source.configuration,
metric: args.metric,
timerange,
};
return await libs.nodes.getNodes(req, options);
},
},
});

View file

@ -1,86 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import gql from 'graphql-tag';
export const nodesSchema: any = gql`
type InfraNodeMetric {
name: InfraMetricType!
value: Float!
avg: Float!
max: Float!
}
type InfraNodePath {
value: String!
label: String!
}
type InfraNode {
path: [InfraNodePath!]!
metric: InfraNodeMetric!
}
enum InfraOperator {
gt
gte
lt
lte
eq
}
enum InfraMetricType {
count
cpu
load
memory
tx
rx
logRate
}
input InfraMetricInput {
"The type of metric"
type: InfraMetricType!
}
enum InfraPathType {
terms
filters
hosts
pods
containers
custom
}
input InfraPathInput {
"The type of path"
type: InfraPathType!
"The label to use in the results for the group by for the terms group by"
label: String
"The field to group by from a terms aggregation, this is ignored by the filter type"
field: String
"The fitlers for the filter group by"
filters: [InfraPathFilterInput!]
}
"A group by filter"
input InfraPathFilterInput {
"The label for the filter, this will be used as the group name in the final results"
label: String!
"The query string query"
query: String!
}
type InfraResponse {
nodes(path: [InfraPathInput!]!, metric: InfraMetricInput!): [InfraNode!]!
}
extend type InfraSource {
"A hierarchy of hosts, pods, containers, services or arbitrary groups"
map(timerange: InfraTimerangeInput!, filterQuery: String): InfraResponse
}
`;
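For contrast with the removed nodes schema above, here is a sketch of what the replacement snapshot schema plausibly declares. It is reconstructed from the generated TypeScript types in this commit, not copied from the actual `snapshot/schema.gql` file (which is not shown in this excerpt), so field nullability and argument order are inferred.

```ts
import gql from 'graphql-tag';

// Reconstructed sketch only; nullability follows the generated types
// (snapshot metric values are nullable, group-by label/field are optional).
export const snapshotSchemaSketch = gql`
  type InfraSnapshotNodePath {
    value: String!
    label: String!
  }

  type InfraSnapshotNodeMetric {
    name: InfraSnapshotMetricType!
    value: Float
    avg: Float
    max: Float
  }

  type InfraSnapshotNode {
    path: [InfraSnapshotNodePath!]!
    metric: InfraSnapshotNodeMetric!
  }

  enum InfraSnapshotMetricType {
    count
    cpu
    load
    memory
    tx
    rx
    logRate
  }

  input InfraSnapshotMetricInput {
    "The type of metric"
    type: InfraSnapshotMetricType!
  }

  input InfraSnapshotGroupbyInput {
    "The label to use in the results for the group by for the terms group by"
    label: String
    "The field to group by from a terms aggregation"
    field: String
  }

  type InfraSnapshotResponse {
    "Nodes of type host, container or pod grouped by 0, 1 or 2 terms"
    nodes(
      type: InfraNodeType!
      groupBy: [InfraSnapshotGroupbyInput!]!
      metric: InfraSnapshotMetricInput!
    ): [InfraSnapshotNode!]!
  }

  extend type InfraSource {
    "A snapshot of nodes"
    snapshot(timerange: InfraTimerangeInput!, filterQuery: String): InfraSnapshotResponse
  }
`;
```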

View file

@ -64,8 +64,6 @@ export interface InfraSource {
logSummaryBetween: InfraLogSummaryInterval;
logItem: InfraLogItem;
/** A hierarchy of hosts, pods, containers, services or arbitrary groups */
map?: InfraResponse | null;
/** A snapshot of nodes */
snapshot?: InfraSnapshotResponse | null;
@ -227,32 +225,6 @@ export interface InfraLogItemField {
value: string;
}
export interface InfraResponse {
nodes: InfraNode[];
}
export interface InfraNode {
path: InfraNodePath[];
metric: InfraNodeMetric;
}
export interface InfraNodePath {
value: string;
label: string;
}
export interface InfraNodeMetric {
name: InfraMetricType;
value: number;
avg: number;
max: number;
}
export interface InfraSnapshotResponse {
/** Nodes of type host, container or pod grouped by 0, 1 or 2 terms */
nodes: InfraSnapshotNode[];
@ -341,29 +313,6 @@ export interface InfraTimerangeInput {
from: number;
}
export interface InfraPathInput {
/** The type of path */
type: InfraPathType;
/** The label to use in the results for the group by for the terms group by */
label?: string | null;
/** The field to group by from a terms aggregation, this is ignored by the filter type */
field?: string | null;
/** The fitlers for the filter group by */
filters?: InfraPathFilterInput[] | null;
}
/** A group by filter */
export interface InfraPathFilterInput {
/** The label for the filter, this will be used as the group name in the final results */
label: string;
/** The query string query */
query: string;
}
export interface InfraMetricInput {
/** The type of metric */
type: InfraMetricType;
}
export interface InfraSnapshotGroupbyInput {
/** The label to use in the results for the group by for the terms group by */
label?: string | null;
@ -491,11 +440,6 @@ export interface LogSummaryBetweenInfraSourceArgs {
export interface LogItemInfraSourceArgs {
id: string;
}
export interface MapInfraSourceArgs {
timerange: InfraTimerangeInput;
filterQuery?: string | null;
}
export interface SnapshotInfraSourceArgs {
timerange: InfraTimerangeInput;
@ -513,11 +457,6 @@ export interface MetricsInfraSourceArgs {
export interface IndexFieldsInfraSourceStatusArgs {
indexType?: InfraIndexType | null;
}
export interface NodesInfraResponseArgs {
path: InfraPathInput[];
metric: InfraMetricInput;
}
export interface NodesInfraSnapshotResponseArgs {
type: InfraNodeType;
@ -558,25 +497,6 @@ export enum InfraNodeType {
host = 'host',
}
export enum InfraPathType {
terms = 'terms',
filters = 'filters',
hosts = 'hosts',
pods = 'pods',
containers = 'containers',
custom = 'custom',
}
export enum InfraMetricType {
count = 'count',
cpu = 'cpu',
load = 'load',
memory = 'memory',
tx = 'tx',
rx = 'rx',
logRate = 'logRate',
}
export enum InfraSnapshotMetricType {
count = 'count',
cpu = 'cpu',
@ -617,14 +537,6 @@ export enum InfraMetric {
nginxRequestsPerConnection = 'nginxRequestsPerConnection',
}
export enum InfraOperator {
gt = 'gt',
gte = 'gte',
lt = 'lt',
lte = 'lte',
eq = 'eq',
}
// ====================================================
// Unions
// ====================================================
@ -688,8 +600,6 @@ export namespace InfraSourceResolvers {
logSummaryBetween?: LogSummaryBetweenResolver<InfraLogSummaryInterval, TypeParent, Context>;
logItem?: LogItemResolver<InfraLogItem, TypeParent, Context>;
/** A hierarchy of hosts, pods, containers, services or arbitrary groups */
map?: MapResolver<InfraResponse | null, TypeParent, Context>;
/** A snapshot of nodes */
snapshot?: SnapshotResolver<InfraSnapshotResponse | null, TypeParent, Context>;
@ -791,17 +701,6 @@ export namespace InfraSourceResolvers {
id: string;
}
export type MapResolver<
R = InfraResponse | null,
Parent = InfraSource,
Context = InfraContext
> = Resolver<R, Parent, Context, MapArgs>;
export interface MapArgs {
timerange: InfraTimerangeInput;
filterQuery?: string | null;
}
export type SnapshotResolver<
R = InfraSnapshotResponse | null,
Parent = InfraSource,
@ -1312,94 +1211,6 @@ export namespace InfraLogItemFieldResolvers {
> = Resolver<R, Parent, Context>;
}
export namespace InfraResponseResolvers {
export interface Resolvers<Context = InfraContext, TypeParent = InfraResponse> {
nodes?: NodesResolver<InfraNode[], TypeParent, Context>;
}
export type NodesResolver<
R = InfraNode[],
Parent = InfraResponse,
Context = InfraContext
> = Resolver<R, Parent, Context, NodesArgs>;
export interface NodesArgs {
path: InfraPathInput[];
metric: InfraMetricInput;
}
}
export namespace InfraNodeResolvers {
export interface Resolvers<Context = InfraContext, TypeParent = InfraNode> {
path?: PathResolver<InfraNodePath[], TypeParent, Context>;
metric?: MetricResolver<InfraNodeMetric, TypeParent, Context>;
}
export type PathResolver<
R = InfraNodePath[],
Parent = InfraNode,
Context = InfraContext
> = Resolver<R, Parent, Context>;
export type MetricResolver<
R = InfraNodeMetric,
Parent = InfraNode,
Context = InfraContext
> = Resolver<R, Parent, Context>;
}
export namespace InfraNodePathResolvers {
export interface Resolvers<Context = InfraContext, TypeParent = InfraNodePath> {
value?: ValueResolver<string, TypeParent, Context>;
label?: LabelResolver<string, TypeParent, Context>;
}
export type ValueResolver<R = string, Parent = InfraNodePath, Context = InfraContext> = Resolver<
R,
Parent,
Context
>;
export type LabelResolver<R = string, Parent = InfraNodePath, Context = InfraContext> = Resolver<
R,
Parent,
Context
>;
}
export namespace InfraNodeMetricResolvers {
export interface Resolvers<Context = InfraContext, TypeParent = InfraNodeMetric> {
name?: NameResolver<InfraMetricType, TypeParent, Context>;
value?: ValueResolver<number, TypeParent, Context>;
avg?: AvgResolver<number, TypeParent, Context>;
max?: MaxResolver<number, TypeParent, Context>;
}
export type NameResolver<
R = InfraMetricType,
Parent = InfraNodeMetric,
Context = InfraContext
> = Resolver<R, Parent, Context>;
export type ValueResolver<
R = number,
Parent = InfraNodeMetric,
Context = InfraContext
> = Resolver<R, Parent, Context>;
export type AvgResolver<R = number, Parent = InfraNodeMetric, Context = InfraContext> = Resolver<
R,
Parent,
Context
>;
export type MaxResolver<R = number, Parent = InfraNodeMetric, Context = InfraContext> = Resolver<
R,
Parent,
Context
>;
}
export namespace InfraSnapshotResponseResolvers {
export interface Resolvers<Context = InfraContext, TypeParent = InfraSnapshotResponse> {
/** Nodes of type host, container or pod grouped by 0, 1 or 2 terms */

View file

@ -9,7 +9,6 @@ import { schemas } from './graphql';
import { createLogEntriesResolvers } from './graphql/log_entries';
import { createMetadataResolvers } from './graphql/metadata';
import { createMetricResolvers } from './graphql/metrics/resolvers';
import { createNodeResolvers } from './graphql/nodes';
import { createSnapshotResolvers } from './graphql/snapshot';
import { createSourceStatusResolvers } from './graphql/source_status';
import { createSourcesResolvers } from './graphql/sources';
@ -21,7 +20,6 @@ export const initInfraServer = (libs: InfraBackendLibs) => {
resolvers: [
createMetadataResolvers(libs) as IResolvers,
createLogEntriesResolvers(libs) as IResolvers,
createNodeResolvers(libs) as IResolvers,
createSnapshotResolvers(libs) as IResolvers,
createSourcesResolvers(libs) as IResolvers,
createSourceStatusResolvers(libs) as IResolvers,

View file

@ -11,8 +11,8 @@ import {
InfraFrameworkRequest,
InfraMetadataAggregationResponse,
} from '../framework';
import { NAME_FIELDS } from '../nodes/constants';
import { InfraMetadataAdapter, InfraMetricsAdapterResponse } from './adapter_types';
import { NAME_FIELDS } from '../../constants';
export class ElasticsearchMetadataAdapter implements InfraMetadataAdapter {
private framework: InfraBackendFrameworkAdapter;

View file

@ -1,237 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { JsonObject } from '../../../../common/typed_json';
import {
InfraMetricInput,
InfraNode,
InfraPathFilterInput,
InfraPathInput,
InfraPathType,
InfraTimerangeInput,
} from '../../../graphql/types';
import { InfraSourceConfiguration } from '../../sources';
import { InfraFrameworkRequest } from '../framework';
export interface InfraNodesAdapter {
getNodes(req: InfraFrameworkRequest, options: InfraNodeRequestOptions): Promise<InfraNode[]>;
}
export interface InfraHostsFieldsObject {
name?: any;
metrics?: any;
groups?: [any];
}
export type InfraESQuery =
| InfraESBoolQuery
| InfraESRangeQuery
| InfraESExistsQuery
| InfraESQueryStringQuery
| InfraESMatchQuery
| JsonObject;
export interface InfraESExistsQuery {
exists: { field: string };
}
export interface InfraESQueryStringQuery {
query_string: {
query: string;
analyze_wildcard: boolean;
};
}
export interface InfraESRangeQuery {
range: {
[name: string]: {
gte: number;
lte: number;
format: string;
};
};
}
export interface InfraESMatchQuery {
match: {
[name: string]: {
query: string;
};
};
}
export interface InfraESBoolQuery {
bool: {
must?: InfraESQuery[];
should?: InfraESQuery[];
filter?: InfraESQuery[];
};
}
export interface InfraESMSearchHeader {
index: string[] | string;
}
export interface InfraESSearchBody {
query?: object;
aggregations?: object;
aggs?: object;
size?: number;
}
export type InfraESMSearchBody = InfraESSearchBody | InfraESMSearchHeader;
export interface InfraNodeRequestOptions {
nodeType: InfraNodeType;
sourceConfiguration: InfraSourceConfiguration;
timerange: InfraTimerangeInput;
groupBy: InfraPathInput[];
metric: InfraMetricInput;
filterQuery: InfraESQuery | undefined;
}
export enum InfraNodesKey {
hosts = 'hosts',
pods = 'pods',
containers = 'containers',
}
export enum InfraNodeType {
host = 'host',
pod = 'pod',
container = 'container',
}
export interface InfraNodesAggregations {
waffle: {
nodes: {
buckets: InfraBucket[];
};
};
}
export type InfraProcessorTransformer<T> = (doc: T) => T;
export type InfraProcessorChainFn<T> = (
next: InfraProcessorTransformer<T>
) => InfraProcessorTransformer<T>;
export type InfraProcessor<O, T> = (options: O) => InfraProcessorChainFn<T>;
export interface InfraProcesorRequestOptions {
nodeType: InfraNodeType;
nodeOptions: InfraNodeRequestOptions;
partitionId: number;
numberOfPartitions: number;
nodeField: string;
}
export interface InfraGroupByFilters {
id: string /** The UUID for the group by object */;
type: InfraPathType /** The type of aggregation to use to bucket the groups */;
label?:
| string
| null /** The label to use in the results for the group by for the terms group by */;
filters: InfraPathFilterInput[] /** The filters to use for the group by aggregation, this is ignored by the terms group by */;
}
export interface InfraGroupByTerms {
id: string /** The UUID for the group by object */;
type: InfraPathType /** The type of aggregation to use to bucket the groups */;
label?:
| string
| null /** The label to use in the results for the group by for the terms group by */;
field: string;
}
export interface InfraBucketWithKey {
key: string | number;
doc_count: number;
}
export interface InfraBucketWithAggs {
[name: string]: {
buckets: InfraBucket[];
};
}
export interface InfraBucketWithValues {
[name: string]: { value: number; normalized_value?: number };
}
export type InfraBucket = InfraBucketWithAggs & InfraBucketWithKey & InfraBucketWithValues;
export interface InfraGroupWithNodes {
name: string;
nodes: InfraNode[];
}
export interface InfraGroupWithSubGroups {
name: string;
groups: InfraGroupWithNodes[];
}
export type InfraNodeGroup = InfraGroupWithNodes | InfraGroupWithSubGroups;
export interface InfraNodesResponse {
total?: number;
}
export interface InfraGroupsResponse {
total: number;
groups: InfraNodeGroup[];
}
export interface InfraNodesOnlyResponse {
total: number;
nodes: InfraNode[];
}
export interface InfraAvgAgg {
avg: { field: string };
}
export interface InfraMaxAgg {
max: { field: string };
}
export interface InfraDerivativeAgg {
derivative: {
buckets_path: string;
gap_policy: string;
unit: string;
};
}
export interface InfraCumulativeSumAgg {
cumulative_sum: {
buckets_path: string;
};
}
export interface InfraBucketScriptAgg {
bucket_script: {
buckets_path: { [key: string]: string };
script: {
source: string;
lang: string;
};
gap_policy: string;
};
}
export type InfraAgg =
| InfraBucketScriptAgg
| InfraDerivativeAgg
| InfraAvgAgg
| InfraMaxAgg
| InfraCumulativeSumAgg
| undefined;
export interface InfraNodeMetricAgg {
[key: string]: InfraAgg;
}
export type InfraNodeMetricFn = (nodeType: InfraNodeType) => InfraNodeMetricAgg | undefined;

View file

@ -1,64 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraBackendFrameworkAdapter, InfraFrameworkRequest } from '../framework';
import {
InfraBucket,
InfraNodeRequestOptions,
InfraNodesAdapter,
InfraNodesAggregations,
} from './adapter_types';
import { InfraNode } from '../../../graphql/types';
import { calculateCardinalityOfNodeField } from './lib/calculate_cardinality';
import { createPartitionBodies } from './lib/create_partition_bodies';
import { processNodes } from './lib/process_nodes';
export class ElasticsearchNodesAdapter implements InfraNodesAdapter {
private framework: InfraBackendFrameworkAdapter;
constructor(framework: InfraBackendFrameworkAdapter) {
this.framework = framework;
}
public async getNodes(
req: InfraFrameworkRequest,
options: InfraNodeRequestOptions
): Promise<InfraNode[]> {
const search = <Aggregation>(searchOptions: object) =>
this.framework.callWithRequest<{}, Aggregation>(req, 'search', searchOptions);
const msearch = <Aggregation>(msearchOptions: object) =>
this.framework.callWithRequest<{}, Aggregation>(req, 'msearch', msearchOptions);
const nodeField = options.sourceConfiguration.fields[options.nodeType];
const totalNodes = await calculateCardinalityOfNodeField(search, nodeField, options);
if (totalNodes === 0) {
return [];
}
const body = createPartitionBodies(totalNodes, options.nodeType, nodeField, options);
const response = await msearch<InfraNodesAggregations>({
body,
});
if (response && response.responses) {
const nodeBuckets: InfraBucket[] = response.responses.reduce(
(current: InfraBucket[], resp) => {
if (!resp.aggregations) {
return current;
}
const buckets = resp.aggregations.waffle.nodes.buckets;
return current.concat(buckets);
},
[]
);
return processNodes(options, nodeBuckets);
}
return [];
}
}

View file

@ -1,56 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraPathInput, InfraPathType } from '../../../graphql/types';
import { InfraNodeType } from './adapter_types';
const getNodeType = (type: InfraPathType): InfraNodeType => {
switch (type) {
case InfraPathType.pods:
return InfraNodeType.pod;
case InfraPathType.containers:
return InfraNodeType.container;
case InfraPathType.hosts:
return InfraNodeType.host;
default:
throw new Error('Invalid InfraPathType');
}
};
const isEntityType = (path: InfraPathInput) => {
if (!path) {
return false;
}
switch (path.type) {
case InfraPathType.containers:
case InfraPathType.hosts:
case InfraPathType.pods:
return true;
default:
return false;
}
};
const moreThenOneEntityType = (path: InfraPathInput[]) => {
return path.filter(isEntityType).length > 1;
};
export function extractGroupByAndNodeFromPath(path: InfraPathInput[]) {
if (moreThenOneEntityType(path)) {
throw new Error('There can be only one entity type in the path.');
}
if (path.length > 3) {
throw new Error('The path can only have a maximum of 3 elements.');
}
const nodePart = path[path.length - 1];
if (!isEntityType(nodePart)) {
throw new Error(
'The last element in the path should be either a "hosts", "containers" or "pods" path type.'
);
}
const nodeType = getNodeType(nodePart.type);
const groupBy = path.slice(0, path.length - 1);
return { groupBy, nodeType };
}
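For illustration, a quick usage sketch of the removed helper with a hypothetical path input (the values are made up and not taken from this PR):

// groupBy keeps everything before the trailing entity element,
// nodeType is derived from that trailing element.
const { groupBy, nodeType } = extractGroupByAndNodeFromPath([
  { type: InfraPathType.terms, field: 'cloud.availability_zone' },
  { type: InfraPathType.hosts },
]);
// nodeType === InfraNodeType.host
// groupBy  === [{ type: InfraPathType.terms, field: 'cloud.availability_zone' }]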

View file

@ -1,7 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export * from './adapter_types';

View file

@ -1,51 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraDatabaseSearchResponse } from '../../framework';
import { InfraESQuery, InfraNodeRequestOptions } from '../adapter_types';
import { createQuery } from './create_query';
interface CardinalityOfFieldParams {
size: number;
query: InfraESQuery;
aggs: {
nodeCount: { cardinality: { field: string } };
};
}
interface CardinalityAggregation {
nodeCount: { value: number };
}
export async function calculateCardinalityOfNodeField(
search: <Aggregation>(options: object) => Promise<InfraDatabaseSearchResponse<{}, Aggregation>>,
nodeField: string,
options: InfraNodeRequestOptions
): Promise<number> {
const { sourceConfiguration }: InfraNodeRequestOptions = options;
const body: CardinalityOfFieldParams = {
aggs: {
nodeCount: {
cardinality: { field: nodeField },
},
},
query: createQuery(options),
size: 0,
};
const resp = await search<CardinalityAggregation>({
allow_no_indices: true,
body,
ignore_unavailable: true,
index: `${sourceConfiguration.logAlias},${sourceConfiguration.metricAlias}`,
});
if (resp.aggregations) {
return resp.aggregations.nodeCount.value;
}
return 0;
}

View file

@ -1,20 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraNode } from '../../../../graphql/types';
import { InfraBucket, InfraNodeRequestOptions } from '../adapter_types';
import { extractGroupPaths } from './extract_group_paths';
export function convertNodesResponseToGroups(
options: InfraNodeRequestOptions,
nodes: InfraBucket[]
): InfraNode[] {
let results: InfraNode[] = [];
nodes.forEach((node: InfraBucket) => {
const nodesWithPaths = extractGroupPaths(options, node);
results = results.concat(nodesWithPaths);
});
return results;
}

View file

@ -1,13 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraPathInput } from '../../../../graphql/types';
export const createBasePath = (groupBy: InfraPathInput[]) => {
const basePath = ['aggs', 'waffle', 'aggs', 'nodes', 'aggs'];
return groupBy.reduce((acc, group, index) => {
return acc.concat([`path_${index}`, `aggs`]);
}, basePath);
};

View file

@ -1,79 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { get, isNumber, last, max, sum } from 'lodash';
import moment from 'moment';
import { InfraMetricType, InfraNode, InfraNodeMetric } from '../../../../graphql/types';
import { InfraBucket, InfraNodeRequestOptions } from '../adapter_types';
import { getBucketSizeInSeconds } from './get_bucket_size_in_seconds';
// TODO: Break these functions into separate files and expand beyond just document count.
// In the code below it looks like overkill to split these three functions out,
// but in reality the create-metrics functions will differ per node type.
const findLastFullBucket = (
bucket: InfraBucket,
bucketSize: number,
options: InfraNodeRequestOptions
): InfraBucket | undefined => {
const { buckets } = bucket.timeseries;
const to = moment.utc(options.timerange.to);
return buckets.reduce((current, item) => {
const itemKey = isNumber(item.key) ? item.key : parseInt(item.key, 10);
const date = moment.utc(itemKey + bucketSize * 1000);
if (!date.isAfter(to) && item.doc_count > 0) {
return item;
}
return current;
}, last(buckets));
};
const getMetricValueFromBucket = (type: InfraMetricType) => (bucket: InfraBucket) => {
const metric = bucket[type];
return (metric && (metric.normalized_value || metric.value)) || 0;
};
function calculateMax(bucket: InfraBucket, type: InfraMetricType) {
const { buckets } = bucket.timeseries;
return max(buckets.map(getMetricValueFromBucket(type))) || 0;
}
function calculateAvg(bucket: InfraBucket, type: InfraMetricType) {
const { buckets } = bucket.timeseries;
return sum(buckets.map(getMetricValueFromBucket(type))) / buckets.length || 0;
}
function createNodeMetrics(
options: InfraNodeRequestOptions,
node: InfraBucket,
bucket: InfraBucket
): InfraNodeMetric {
const { timerange, metric } = options;
const bucketSize = getBucketSizeInSeconds(timerange.interval);
const lastBucket = findLastFullBucket(bucket, bucketSize, options);
if (!lastBucket) {
throw new Error('Date histogram returned an empty set of buckets.');
}
return {
name: metric.type,
value: getMetricValueFromBucket(metric.type)(lastBucket),
max: calculateMax(bucket, metric.type),
avg: calculateAvg(bucket, metric.type),
};
}
export function createNodeItem(
options: InfraNodeRequestOptions,
node: InfraBucket,
bucket: InfraBucket
): InfraNode {
const nodeDetails = get(node, ['nodeDetails', 'buckets', 0]);
return {
metric: createNodeMetrics(options, node, bucket),
path: [{ value: node.key, label: get(nodeDetails, 'key', node.key) }],
} as InfraNode;
}

View file

@ -1,15 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraESSearchBody, InfraProcesorRequestOptions } from '../adapter_types';
import { createLastNProcessor } from '../processors/last';
export function createNodeRequestBody(options: InfraProcesorRequestOptions): InfraESSearchBody {
const requestProcessor = createLastNProcessor(options);
const doc = {};
const body = requestProcessor(doc);
return body;
}

View file

@ -1,49 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { times } from 'lodash';
import { InfraMetricType } from '../../../../graphql/types';
import {
InfraESMSearchBody,
InfraNodeRequestOptions,
InfraNodeType,
InfraProcesorRequestOptions,
} from '../adapter_types';
import { NODE_REQUEST_PARTITION_SIZE } from '../constants';
import { createNodeRequestBody } from './create_node_request_body';
export function createPartitionBodies(
totalNodes: number,
nodeType: InfraNodeType,
nodeField: string,
nodeOptions: InfraNodeRequestOptions
): InfraESMSearchBody[] {
const { sourceConfiguration }: InfraNodeRequestOptions = nodeOptions;
const bodies: InfraESMSearchBody[] = [];
const numberOfPartitions: number = Math.ceil(totalNodes / NODE_REQUEST_PARTITION_SIZE);
const indices =
nodeOptions.metric.type === InfraMetricType.logRate
? sourceConfiguration.logAlias
: sourceConfiguration.metricAlias;
times(
numberOfPartitions,
(partitionId: number): void => {
const processorOptions: InfraProcesorRequestOptions = {
nodeType,
nodeField,
nodeOptions,
numberOfPartitions,
partitionId,
};
bodies.push({
index: indices,
});
bodies.push(createNodeRequestBody(processorOptions));
}
);
return bodies;
}
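A rough sketch of the partitioning arithmetic above; the partition size of 75 comes from the constants file removed further down in this diff, and the node count is made up:

const NODE_REQUEST_PARTITION_SIZE = 75;
const totalNodes = 180; // hypothetical cardinality of the node field
const numberOfPartitions = Math.ceil(totalNodes / NODE_REQUEST_PARTITION_SIZE); // 3
// Each partition contributes an index header plus a search body, so the
// msearch payload ends up with numberOfPartitions * 2 = 6 entries.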

View file

@ -1,77 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraPathFilterInput, InfraPathInput } from '../../../../graphql/types';
import {
InfraESBoolQuery,
InfraESQuery,
InfraESRangeQuery,
InfraNodeRequestOptions,
} from '../adapter_types';
import { isGroupByFilters, isGroupByTerms } from './type_guards';
export function createQuery(options: InfraNodeRequestOptions): InfraESQuery {
const { timerange, sourceConfiguration, groupBy, filterQuery }: InfraNodeRequestOptions = options;
const mustClause: InfraESQuery[] = [];
const shouldClause: InfraESQuery[] = [];
const filterClause: InfraESQuery[] = [];
const rangeFilter: InfraESRangeQuery = {
range: {
[sourceConfiguration.fields.timestamp]: {
format: 'epoch_millis',
gte: timerange.from,
lte: timerange.to,
},
},
};
filterClause.push(rangeFilter);
if (groupBy) {
groupBy.forEach(
(group: InfraPathInput): void => {
if (isGroupByTerms(group) && group.field) {
mustClause.push({
exists: {
field: group.field,
},
});
}
if (isGroupByFilters(group) && group.filters) {
group.filters!.forEach(
(groupFilter: InfraPathFilterInput | null): void => {
if (groupFilter != null && groupFilter.query) {
shouldClause.push({
query_string: {
analyze_wildcard: true,
query: groupFilter.query,
},
});
}
}
);
}
}
);
}
if (filterQuery) {
mustClause.push(filterQuery);
}
const query: InfraESBoolQuery = {
bool: {
filter: filterClause,
must: mustClause,
should: shouldClause,
},
};
return query;
}

View file

@ -1,54 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraNode, InfraNodePath, InfraPathInput } from '../../../../graphql/types';
import { InfraBucket, InfraNodeRequestOptions } from '../adapter_types';
import { createNodeItem } from './create_node_item';
export interface InfraPathItem {
path: string[];
nodeItem: InfraNode;
}
export function extractGroupPaths(
options: InfraNodeRequestOptions,
node: InfraBucket
): InfraNode[] {
const { groupBy } = options;
const secondGroup: InfraPathInput = groupBy[1];
const paths: InfraNode[] = node.path_0.buckets.reduce(
(acc: InfraNode[], bucket: InfraBucket, index: number): InfraNode[] => {
const key: string = (bucket.key || index).toString();
if (secondGroup) {
return acc.concat(
bucket.path_1.buckets.map(
(b: InfraBucket): InfraNode => {
const innerNode = createNodeItem(options, node, b);
const groupPaths: InfraNodePath[] = [
{ value: bucket.key.toString(), label: bucket.key.toString() },
{ value: b.key.toString(), label: b.key.toString() },
];
const nodePath = groupPaths.concat(innerNode.path);
return {
...innerNode,
path: nodePath,
};
}
)
);
}
const nodeItem = createNodeItem(options, node, bucket);
const currentPath: InfraNodePath[] = [{ value: key, label: key }];
const path = currentPath.concat(nodeItem.path);
return acc.concat({
...nodeItem,
path,
});
},
[]
);
return paths;
}

View file

@ -1,31 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
const intervalUnits = ['y', 'M', 'w', 'd', 'h', 'm', 's', 'ms'];
const INTERVAL_STRING_RE = new RegExp('^([0-9\\.]*)\\s*(' + intervalUnits.join('|') + ')$');
interface UnitsToSeconds {
[unit: string]: number;
}
const units: UnitsToSeconds = {
ms: 0.001,
s: 1,
m: 60,
h: 3600,
d: 86400,
w: 86400 * 7,
M: 86400 * 30,
y: 86400 * 356,
};
export const getBucketSizeInSeconds = (interval: string): number => {
const matches = interval.match(INTERVAL_STRING_RE);
if (matches) {
return parseFloat(matches[1]) * units[matches[2]];
}
throw new Error('Invalid interval string format.');
};
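A few example values for the parser above, following directly from the units table (illustrative only):

getBucketSizeInSeconds('30s'); // 30
getBucketSizeInSeconds('5m');  // 300
getBucketSizeInSeconds('2h');  // 7200
getBucketSizeInSeconds('1w');  // 604800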

View file

@ -1,26 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraNode } from '../../../../graphql/types';
import { InfraBucket, InfraNodeRequestOptions } from '../adapter_types';
import { convertNodesResponseToGroups } from './convert_nodes_response_to_groups';
import { createNodeItem } from './create_node_item';
export function processNodes(options: InfraNodeRequestOptions, nodes: any[]): InfraNode[] {
if (options.groupBy.length === 0) {
// If there are NO group-by options, then we need to return a
// nodes-only response.
const nodeResults: InfraNode[] = nodes.map(
(node: InfraBucket): InfraNode => {
return createNodeItem(options, node, node);
}
);
return nodeResults;
}
// Return a grouped response
return convertNodesResponseToGroups(options, nodes);
}

View file

@ -1,16 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraPathInput, InfraPathType } from '../../../../graphql/types';
import { InfraGroupByFilters, InfraGroupByTerms } from '../adapter_types';
export function isGroupByFilters(value: InfraPathInput): value is InfraGroupByFilters {
return value.type === InfraPathType.filters;
}
export function isGroupByTerms(value: InfraPathInput): value is InfraGroupByTerms {
return value.type === InfraPathType.terms;
}

View file

@ -1,22 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraNodeMetricFn, InfraNodeType } from '../adapter_types';
export const count: InfraNodeMetricFn = (nodeType: InfraNodeType) => {
return {
count: {
bucket_script: {
buckets_path: { count: '_count' },
script: {
source: 'count * 1',
lang: 'expression',
},
gap_policy: 'skip',
},
},
};
};

View file

@ -1,58 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraNodeMetricFn, InfraNodeType } from '../adapter_types';
const FIELDS = {
[InfraNodeType.host]: 'system.cpu.user.pct',
[InfraNodeType.pod]: 'kubernetes.pod.cpu.usage.node.pct',
[InfraNodeType.container]: 'docker.cpu.total.pct',
};
export const cpu: InfraNodeMetricFn = (nodeType: InfraNodeType) => {
if (nodeType === InfraNodeType.host) {
return {
cpu_user: {
avg: {
field: 'system.cpu.user.pct',
},
},
cpu_system: {
avg: {
field: 'system.cpu.system.pct',
},
},
cpu_cores: {
max: {
field: 'system.cpu.cores',
},
},
cpu: {
bucket_script: {
buckets_path: {
user: 'cpu_user',
system: 'cpu_system',
cores: 'cpu_cores',
},
script: {
source: '(params.user + params.system) / params.cores',
lang: 'painless',
},
gap_policy: 'skip',
},
},
};
}
const field = FIELDS[nodeType];
return {
cpu: {
avg: {
field,
},
},
};
};

View file

@ -1,24 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraMetricType } from '../../../../graphql/types';
import { count } from './count';
import { cpu } from './cpu';
import { load } from './load';
import { logRate } from './log_rate';
import { memory } from './memory';
import { rx } from './rx';
import { tx } from './tx';
export const metricAggregationCreators = {
[InfraMetricType.count]: count,
[InfraMetricType.cpu]: cpu,
[InfraMetricType.memory]: memory,
[InfraMetricType.rx]: rx,
[InfraMetricType.tx]: tx,
[InfraMetricType.load]: load,
[InfraMetricType.logRate]: logRate,
};
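The creators are keyed by metric type and parameterised by node type, so callers can look up an aggregation tree in one step. For example (illustrative only, with InfraNodeType imported from the graphql types):

const hostCpuAggs = metricAggregationCreators[InfraMetricType.cpu](InfraNodeType.host);
// yields the cpu_user / cpu_system / cpu_cores / cpu pipeline from cpu.ts above;
// for pods or containers the same call returns a single avg on the respective field.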

View file

@ -1,20 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraNodeMetricFn, InfraNodeType } from '../adapter_types';
const FIELDS = {
[InfraNodeType.host]: 'system.load.5',
[InfraNodeType.pod]: '',
[InfraNodeType.container]: '',
};
export const load: InfraNodeMetricFn = (nodeType: InfraNodeType) => {
const field = FIELDS[nodeType];
if (field) {
return { load: { avg: { field } } };
}
};

View file

@ -1,34 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraNodeMetricFn, InfraNodeType } from '../adapter_types';
export const logRate: InfraNodeMetricFn = (nodeType: InfraNodeType) => {
return {
count: {
bucket_script: {
buckets_path: { count: '_count' },
script: {
source: 'count * 1',
lang: 'expression',
},
gap_policy: 'skip',
},
},
cumsum: {
cumulative_sum: {
buckets_path: 'count',
},
},
logRate: {
derivative: {
buckets_path: 'cumsum',
gap_policy: 'skip',
unit: '1s',
},
},
};
};
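A worked trace of the pipeline above, with made-up document counts and 60-second buckets (illustrative only):

// doc_count per bucket:        [10, 20, 10]
// count (bucket_script):       [10, 20, 10]
// cumsum (cumulative_sum):     [10, 30, 40]
// logRate (derivative, '1s'):  value [-, 20, 10], normalized_value [-, 0.333, 0.167]
// i.e. roughly 20 docs per 60 s and 10 docs per 60 s for the last two buckets.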

View file

@ -1,17 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraNodeMetricFn, InfraNodeType } from '../adapter_types';
const FIELDS = {
[InfraNodeType.host]: 'system.memory.actual.used.pct',
[InfraNodeType.pod]: 'kubernetes.pod.memory.usage.node.pct',
[InfraNodeType.container]: 'docker.memory.usage.pct',
};
export const memory: InfraNodeMetricFn = (nodeType: InfraNodeType) => {
const field = FIELDS[nodeType];
return { memory: { avg: { field } } };
};

View file

@ -1,41 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraNodeMetricFn, InfraNodeType } from '../adapter_types';
interface Fields {
[InfraNodeType.container]: string;
[InfraNodeType.pod]: string;
[InfraNodeType.host]: string;
}
export const rate = (id: string, fields: Fields): InfraNodeMetricFn => (
nodeType: InfraNodeType
) => {
const field = fields[nodeType];
if (field) {
return {
[`${id}_max`]: { max: { field } },
[`${id}_deriv`]: {
derivative: {
buckets_path: `${id}_max`,
gap_policy: 'skip',
unit: '1s',
},
},
[id]: {
bucket_script: {
buckets_path: { value: `${id}_deriv[normalized_value]` },
script: {
source: 'params.value > 0.0 ? params.value : 0.0',
lang: 'painless',
},
gap_policy: 'skip',
},
},
};
}
};

View file

@ -1,15 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraNodeType } from '../adapter_types';
import { rate } from './rate';
const FIELDS = {
[InfraNodeType.host]: 'system.network.in.bytes',
[InfraNodeType.pod]: 'kubernetes.pod.network.rx.bytes',
[InfraNodeType.container]: 'docker.network.in.bytes',
};
export const rx = rate('rx', FIELDS);
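Applying the creator above to a host produces the three-stage pipeline built by rate(): a per-bucket max of the byte counter, a per-second derivative, and a bucket_script that clamps negative values (counter resets) to zero. Roughly (illustrative only):

const hostRxAggs = rx(InfraNodeType.host);
// {
//   rx_max:   { max: { field: 'system.network.in.bytes' } },
//   rx_deriv: { derivative: { buckets_path: 'rx_max', gap_policy: 'skip', unit: '1s' } },
//   rx:       { bucket_script: { buckets_path: { value: 'rx_deriv[normalized_value]' }, ... } },
// }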

View file

@ -1,16 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraNodeType } from '../adapter_types';
import { rate } from './rate';
const FIELDS = {
[InfraNodeType.host]: 'system.network.out.bytes',
[InfraNodeType.pod]: 'kubernetes.pod.network.tx.bytes',
[InfraNodeType.container]: 'docker.network.out.bytes',
};
export const tx = rate('tx', FIELDS);

View file

@ -1,24 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { cloneDeep, set } from 'lodash';
import { InfraESSearchBody, InfraProcesorRequestOptions } from '../../adapter_types';
export const fieldsFilterProcessor = (options: InfraProcesorRequestOptions) => {
return (doc: InfraESSearchBody) => {
const result = cloneDeep(doc);
/*
TODO: Add filter logic that finds all the fields the user is requesting
and adds an exists filter for each, so that we only look at documents
that contain the correct fields. This is necessary because we have to run a
partitioned terms agg at the top level; normally the documents would get
filtered out naturally.
*/
set(result, 'aggs.waffle.filter.match_all', {});
return result;
};
};

View file

@ -1,58 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { cloneDeep, get, set } from 'lodash';
import { InfraPathFilterInput, InfraPathInput } from '../../../../../graphql/types';
import {
InfraESQueryStringQuery,
InfraESSearchBody,
InfraProcesorRequestOptions,
} from '../../adapter_types';
import { isGroupByFilters, isGroupByTerms } from '../../lib/type_guards';
export const groupByProcessor = (options: InfraProcesorRequestOptions) => {
return (doc: InfraESSearchBody) => {
const result = cloneDeep(doc);
const { groupBy } = options.nodeOptions;
let aggs = get(result, 'aggs.waffle.aggs.nodes.aggs', {});
set(result, 'aggs.waffle.aggs.nodes.aggs', aggs);
groupBy.forEach((grouping: InfraPathInput, index: number) => {
if (isGroupByTerms(grouping)) {
const termsAgg = {
aggs: {},
terms: {
field: grouping.field,
size: 10,
},
};
set(aggs, `path_${index}`, termsAgg);
aggs = termsAgg.aggs;
}
if (grouping && isGroupByFilters(grouping)) {
const filtersAgg = {
aggs: {},
filters: {
filters: grouping.filters!.map(
(filter: InfraPathFilterInput): InfraESQueryStringQuery => {
return {
query_string: {
analyze_wildcard: true,
query: (filter && filter.query) || '*',
},
};
}
),
},
};
set(aggs, `path_${index}`, filtersAgg);
aggs = filtersAgg.aggs;
}
});
return result;
};
};

View file

@ -1,53 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { cloneDeep, set } from 'lodash';
import { InfraESSearchBody, InfraNodeType, InfraProcesorRequestOptions } from '../../adapter_types';
import {
NAME_FIELDS,
NODE_REQUEST_PARTITION_FACTOR,
NODE_REQUEST_PARTITION_SIZE,
} from '../../constants';
const nodeTypeToField = (options: InfraProcesorRequestOptions): string => {
const { fields } = options.nodeOptions.sourceConfiguration;
switch (options.nodeType) {
case InfraNodeType.pod:
return fields.pod;
case InfraNodeType.container:
return fields.container;
default:
return fields.host;
}
};
export const nodesProcessor = (options: InfraProcesorRequestOptions) => {
return (doc: InfraESSearchBody) => {
const result = cloneDeep(doc);
const field = nodeTypeToField(options);
set(result, 'aggs.waffle.aggs.nodes.terms', {
field,
include: {
num_partitions: options.numberOfPartitions,
partition: options.partitionId,
},
order: { _key: 'asc' },
size: NODE_REQUEST_PARTITION_SIZE * NODE_REQUEST_PARTITION_FACTOR,
});
set(result, 'aggs.waffle.aggs.nodes.aggs', {
nodeDetails: {
terms: {
field: NAME_FIELDS[options.nodeType],
size: 1,
},
},
});
return result;
};
};
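Illustrative shape of the terms aggregation the processor above writes for a host request split into two partitions, assuming the default host field 'host.name'; the size of 90 is NODE_REQUEST_PARTITION_SIZE * NODE_REQUEST_PARTITION_FACTOR (75 * 1.2) from the constants file:

// aggs.waffle.aggs.nodes.terms:
// {
//   field: 'host.name',
//   include: { num_partitions: 2, partition: 0 },
//   order: { _key: 'asc' },
//   size: 90,
// }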

View file

@ -1,19 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { cloneDeep, set } from 'lodash';
import { InfraESSearchBody, InfraProcesorRequestOptions } from '../../adapter_types';
import { createQuery } from '../../lib/create_query';
export const queryProcessor = (options: InfraProcesorRequestOptions) => {
return (doc: InfraESSearchBody) => {
const result = cloneDeep(doc);
set(result, 'size', 0);
set(result, 'query', createQuery(options.nodeOptions));
return result;
};
};

View file

@ -1,44 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { cloneDeep, set } from 'lodash';
import { InfraESSearchBody, InfraProcesorRequestOptions } from '../../adapter_types';
import { createBasePath } from '../../lib/create_base_path';
import { getBucketSizeInSeconds } from '../../lib/get_bucket_size_in_seconds';
export function getBucketKey(value: number, interval: number, offset = 0) {
return Math.floor((value - offset) / interval) * interval + offset;
}
export const calculateOffsetInSeconds = (end: number, interval: number) => {
const bucketKey = getBucketKey(end, interval);
return Math.floor(end - interval - bucketKey);
};
export const dateHistogramProcessor = (options: InfraProcesorRequestOptions) => {
return (doc: InfraESSearchBody) => {
const result = cloneDeep(doc);
const { timerange, sourceConfiguration, groupBy } = options.nodeOptions;
const bucketSizeInSeconds = getBucketSizeInSeconds(timerange.interval);
const path = createBasePath(groupBy).concat('timeseries');
const bucketOffset = calculateOffsetInSeconds(timerange.from, bucketSizeInSeconds);
const offset = `${Math.floor(bucketOffset)}s`;
set(result, path, {
date_histogram: {
field: sourceConfiguration.fields.timestamp,
interval: timerange.interval,
min_doc_count: 0,
offset,
extended_bounds: {
min: timerange.from,
max: timerange.to,
},
},
aggs: {},
});
return result;
};
};

View file

@ -1,31 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { pipe } from 'lodash/fp';
import {
InfraESSearchBody,
InfraProcesorRequestOptions,
InfraProcessorTransformer,
} from '../../adapter_types';
import { fieldsFilterProcessor } from '../common/field_filter_processor';
import { groupByProcessor } from '../common/group_by_processor';
import { nodesProcessor } from '../common/nodes_processor';
import { queryProcessor } from '../common/query_procssor';
import { dateHistogramProcessor } from './date_histogram_processor';
import { metricBucketsProcessor } from './metric_buckets_processor';
export const createLastNProcessor = (
options: InfraProcesorRequestOptions
): InfraProcessorTransformer<InfraESSearchBody> => {
return pipe(
fieldsFilterProcessor(options),
nodesProcessor(options),
queryProcessor(options),
groupByProcessor(options),
dateHistogramProcessor(options),
metricBucketsProcessor(options)
);
};

View file

@ -1,22 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { cloneDeep, set } from 'lodash';
import { InfraESSearchBody, InfraProcesorRequestOptions } from '../../adapter_types';
import { createBasePath } from '../../lib/create_base_path';
import { metricAggregationCreators } from '../../metric_aggregation_creators';
export const metricBucketsProcessor = (options: InfraProcesorRequestOptions) => {
return (doc: InfraESSearchBody) => {
const result = cloneDeep(doc);
const { metric, groupBy } = options.nodeOptions;
const path = createBasePath(groupBy).concat(['timeseries', 'aggs']);
const aggregationCreator = metricAggregationCreators[metric.type];
const aggs = aggregationCreator(options.nodeType);
set(result, path, aggs);
return result;
};
};

View file

@ -12,13 +12,11 @@ import { InfraKibanaBackendFrameworkAdapter } from '../adapters/framework/kibana
import { InfraKibanaLogEntriesAdapter } from '../adapters/log_entries/kibana_log_entries_adapter';
import { ElasticsearchMetadataAdapter } from '../adapters/metadata/elasticsearch_metadata_adapter';
import { KibanaMetricsAdapter } from '../adapters/metrics/kibana_metrics_adapter';
import { ElasticsearchNodesAdapter } from '../adapters/nodes/elasticsearch_nodes_adapter';
import { InfraElasticsearchSourceStatusAdapter } from '../adapters/source_status';
import { InfraFieldsDomain } from '../domains/fields_domain';
import { InfraLogEntriesDomain } from '../domains/log_entries_domain';
import { InfraMetadataDomain } from '../domains/metadata_domain';
import { InfraMetricsDomain } from '../domains/metrics_domain';
import { InfraNodesDomain } from '../domains/nodes_domain';
import { InfraBackendLibs, InfraDomainLibs } from '../infra_types';
import { InfraSnapshot } from '../snapshot';
import { InfraSourceStatus } from '../source_status';
@ -46,7 +44,6 @@ export function compose(server: Server): InfraBackendLibs {
logEntries: new InfraLogEntriesDomain(new InfraKibanaLogEntriesAdapter(framework), {
sources,
}),
nodes: new InfraNodesDomain(new ElasticsearchNodesAdapter(framework)),
metrics: new InfraMetricsDomain(new KibanaMetricsAdapter(framework)),
};

View file

@ -4,10 +4,12 @@
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraNodeType } from '../../../graphql/types';
// TODO: Make NODE_REQUEST_PARTITION_SIZE configurable from kibana.yml
export const NODE_REQUEST_PARTITION_SIZE = 75;
export const NODE_REQUEST_PARTITION_FACTOR = 1.2;
import { InfraNodeType } from '../graphql/types';
// Used by the metadata and snapshot resolvers to find the field that contains
// the displayable name of a node.
// Intentionally not the same as xpack.infra.sources.default.fields.{host,container,pod}.
// TODO: consider moving this to source configuration too.
export const NAME_FIELDS = {
[InfraNodeType.host]: 'host.name',
[InfraNodeType.pod]: 'kubernetes.pod.name',

View file

@ -1,24 +0,0 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { InfraNode } from '../../graphql/types';
import { InfraFrameworkRequest } from '../adapters/framework';
import { InfraNodeRequestOptions, InfraNodesAdapter } from '../adapters/nodes';
export class InfraNodesDomain {
private adapter: InfraNodesAdapter;
constructor(adapter: InfraNodesAdapter) {
this.adapter = adapter;
}
public async getNodes(
req: InfraFrameworkRequest,
options: InfraNodeRequestOptions
): Promise<InfraNode[]> {
return await this.adapter.getNodes(req, options);
}
}

View file

@ -11,7 +11,6 @@ import { InfraFieldsDomain } from './domains/fields_domain';
import { InfraLogEntriesDomain } from './domains/log_entries_domain';
import { InfraMetadataDomain } from './domains/metadata_domain';
import { InfraMetricsDomain } from './domains/metrics_domain';
import { InfraNodesDomain } from './domains/nodes_domain';
import { InfraSnapshot } from './snapshot';
import { InfraSourceStatus } from './source_status';
import { InfraSources } from './sources';
@ -20,7 +19,6 @@ export interface InfraDomainLibs {
metadata: InfraMetadataDomain;
fields: InfraFieldsDomain;
logEntries: InfraLogEntriesDomain;
nodes: InfraNodesDomain;
metrics: InfraMetricsDomain;
}

View file

@ -6,21 +6,36 @@
import { metricAggregationCreators } from './metric_aggregation_creators';
import { InfraSnapshotRequestOptions } from './snapshot';
import { NAME_FIELDS } from '../constants';
import { getIntervalInSeconds } from '../../utils/get_interval_in_seconds';
export const getGroupedNodesSources = (options: InfraSnapshotRequestOptions) => {
const sources = options.groupBy.map(gb => {
return { [`${gb.field}`]: { terms: { field: gb.field } } };
});
sources.push({
node: { terms: { field: options.sourceConfiguration.fields[options.nodeType] } },
id: { terms: { field: options.sourceConfiguration.fields[options.nodeType] } },
});
sources.push({
name: { terms: { field: NAME_FIELDS[options.nodeType] } },
});
return sources;
};
export const getMetricsSources = (options: InfraSnapshotRequestOptions) => {
return [{ node: { terms: { field: options.sourceConfiguration.fields[options.nodeType] } } }];
return [{ id: { terms: { field: options.sourceConfiguration.fields[options.nodeType] } } }];
};
export const getMetricsAggregations = (options: InfraSnapshotRequestOptions) => {
return metricAggregationCreators[options.metric.type](options.nodeType);
};
export const getDateHistogramOffset = (options: InfraSnapshotRequestOptions): string => {
const { from, interval } = options.timerange;
const fromInSeconds = Math.floor(from / 1000);
const bucketSizeInSeconds = getIntervalInSeconds(interval);
// negative offset to align buckets with full intervals (e.g. minutes)
const offset = (fromInSeconds % bucketSizeInSeconds) - bucketSizeInSeconds;
return `${offset}s`;
};
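A quick numeric trace of the offset calculation above (the timestamps are made up):

// from = 1556614230000 (epoch millis)  → fromInSeconds = 1556614230
// interval = '1m'                      → bucketSizeInSeconds = 60
// offset = (1556614230 % 60) - 60 = 30 - 60 = -30   → '-30s'
// With this offset a date_histogram bucket boundary falls exactly on `from`,
// so the first bucket of the requested timerange covers a full interval.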

View file

@ -16,7 +16,7 @@ import { getIntervalInSeconds } from '../../utils/get_interval_in_seconds';
import { InfraSnapshotRequestOptions } from './snapshot';
export interface InfraSnapshotNodeMetricsBucket {
key: { node: string };
key: { id: string };
histogram: {
buckets: InfraSnapshotMetricsBucket[];
};
@ -40,7 +40,8 @@ export type InfraSnapshotMetricsBucket = InfraSnapshotBucketWithKey & InfraSnaps
export interface InfraSnapshotNodeGroupByBucket {
key: {
node: string;
id: string;
name: string;
[groupByField: string]: string;
};
}
@ -53,19 +54,19 @@ export const getNodePath = (
const path = options.groupBy.map(gb => {
return { value: node[`${gb.field}`], label: node[`${gb.field}`] };
});
path.push({ value: node.node, label: node.node });
path.push({ value: node.id, label: node.name });
return path;
};
interface NodeMetricsForLookup {
[node: string]: InfraSnapshotMetricsBucket[];
[nodeId: string]: InfraSnapshotMetricsBucket[];
}
export const getNodeMetricsForLookup = (
metrics: InfraSnapshotNodeMetricsBucket[]
): NodeMetricsForLookup => {
return metrics.reduce((acc: NodeMetricsForLookup, metric) => {
acc[`${metric.key.node}`] = metric.histogram.buckets;
acc[`${metric.key.id}`] = metric.histogram.buckets;
return acc;
}, {});
};
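To illustrate the id/name distinction introduced above, a hypothetical composite bucket key and the path it yields (the values are made up):

// key: { 'cloud.availability_zone': 'us-east-1a', id: 'host-01', name: 'demo-host' }
// getNodePath(...) for that bucket →
// [
//   { value: 'us-east-1a', label: 'us-east-1a' },
//   { value: 'host-01',    label: 'demo-host' },
// ]
// The UI keys nodes on `id` but displays `name`.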

View file

@ -17,7 +17,12 @@ import { InfraSources } from '../sources';
import { JsonObject } from '../../../common/typed_json';
import { SNAPSHOT_COMPOSITE_REQUEST_SIZE } from './constants';
import { getGroupedNodesSources, getMetricsAggregations, getMetricsSources } from './query_helpers';
import {
getGroupedNodesSources,
getMetricsAggregations,
getMetricsSources,
getDateHistogramOffset,
} from './query_helpers';
import {
getNodeMetrics,
getNodeMetricsForLookup,
@ -121,7 +126,6 @@ const requestNodeMetrics = async (
query: {
bool: {
filter: [
...createQueryFilterClauses(options.filterQuery),
{
range: {
[options.sourceConfiguration.fields.timestamp]: {
@ -146,6 +150,11 @@ const requestNodeMetrics = async (
date_histogram: {
field: options.sourceConfiguration.fields.timestamp,
interval: options.timerange.interval || '1m',
offset: getDateHistogramOffset(options),
extended_bounds: {
min: options.timerange.from,
max: options.timerange.to,
},
},
aggregations: getMetricsAggregations(options),
},
@ -221,7 +230,7 @@ const mergeNodeBuckets = (
return nodeGroupByBuckets.map(node => {
return {
path: getNodePath(node, options),
metric: getNodeMetrics(nodeMetricsForLookup[node.key.node], options),
metric: getNodeMetrics(nodeMetricsForLookup[node.key.id], options),
};
});
};

View file

@ -35,14 +35,15 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
interval: '1m',
},
metric: { type: 'cpu' },
path: [{ type: 'containers' }],
type: 'container',
groupBy: [],
},
})
.then(resp => {
const { map } = resp.data.source;
expect(map).to.have.property('nodes');
if (map) {
const { nodes } = map;
const { snapshot } = resp.data.source;
expect(snapshot).to.have.property('nodes');
if (snapshot) {
const { nodes } = snapshot;
expect(nodes.length).to.equal(5);
const firstNode = first(nodes);
expect(firstNode).to.have.property('path');
@ -61,7 +62,7 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
value: 0,
max: 0,
avg: 0,
__typename: 'InfraNodeMetric',
__typename: 'InfraSnapshotNodeMetric',
});
}
});
@ -85,14 +86,15 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
interval: '1m',
},
metric: { type: 'cpu' },
path: [{ type: 'hosts' }],
type: 'host',
groupBy: [],
},
})
.then(resp => {
const { map } = resp.data.source;
expect(map).to.have.property('nodes');
if (map) {
const { nodes } = map;
const { snapshot } = resp.data.source;
expect(snapshot).to.have.property('nodes');
if (snapshot) {
const { nodes } = snapshot;
expect(nodes.length).to.equal(1);
const firstNode = first(nodes);
expect(firstNode).to.have.property('path');
@ -102,10 +104,10 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
expect(firstNode).to.have.property('metric');
expect(firstNode.metric).to.eql({
name: 'cpu',
value: 0.0035,
avg: 0.009066666666666666,
max: 0.0684,
__typename: 'InfraNodeMetric',
value: 0.003666666666666667,
avg: 0.00809090909090909,
max: 0.057833333333333334,
__typename: 'InfraSnapshotNodeMetric',
});
}
});
@ -123,14 +125,15 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
interval: '1m',
},
metric: { type: 'cpu' },
path: [{ type: 'terms', field: 'cloud.availability_zone' }, { type: 'hosts' }],
type: 'host',
groupBy: [{ field: 'cloud.availability_zone' }],
},
})
.then(resp => {
const { map } = resp.data.source;
expect(map).to.have.property('nodes');
if (map) {
const { nodes } = map;
const { snapshot } = resp.data.source;
expect(snapshot).to.have.property('nodes');
if (snapshot) {
const { nodes } = snapshot;
expect(nodes.length).to.equal(1);
const firstNode = first(nodes);
expect(firstNode).to.have.property('path');
@ -141,7 +144,7 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
});
});
it('should basically work with 2 grouping', () => {
it('should basically work with 2 groupings', () => {
return client
.query<WaffleNodesQuery.Query>({
query: waffleNodesQuery,
@ -153,18 +156,15 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
interval: '1m',
},
metric: { type: 'cpu' },
path: [
{ type: 'terms', field: 'cloud.provider' },
{ type: 'terms', field: 'cloud.availability_zone' },
{ type: 'hosts' },
],
type: 'host',
groupBy: [{ field: 'cloud.provider' }, { field: 'cloud.availability_zone' }],
},
})
.then(resp => {
const { map } = resp.data.source;
expect(map).to.have.property('nodes');
if (map) {
const { nodes } = map;
const { snapshot } = resp.data.source;
expect(snapshot).to.have.property('nodes');
if (snapshot) {
const { nodes } = snapshot;
expect(nodes.length).to.equal(1);
const firstNode = first(nodes);
expect(firstNode).to.have.property('path');
@ -175,6 +175,58 @@ const waffleTests: KbnTestProvider = ({ getService }) => {
}
});
});
it('should show metrics for all nodes when grouping by service type', () => {
return client
.query<WaffleNodesQuery.Query>({
query: waffleNodesQuery,
variables: {
sourceId: 'default',
timerange: {
to: max,
from: min,
interval: '1m',
},
metric: { type: 'cpu' },
type: 'host',
groupBy: [{ field: 'service.type' }],
},
})
.then(resp => {
const { snapshot } = resp.data.source;
expect(snapshot).to.have.property('nodes');
if (snapshot) {
const { nodes } = snapshot;
expect(nodes.length).to.equal(2);
const firstNode = nodes[0];
expect(firstNode).to.have.property('path');
expect(firstNode.path.length).to.equal(2);
expect(firstNode.path[0]).to.have.property('value', 'mysql');
expect(firstNode.path[1]).to.have.property('value', 'demo-stack-mysql-01');
expect(firstNode).to.have.property('metric');
expect(firstNode.metric).to.eql({
name: 'cpu',
value: 0.003666666666666667,
avg: 0.00809090909090909,
max: 0.057833333333333334,
__typename: 'InfraSnapshotNodeMetric',
});
const secondNode = nodes[1];
expect(secondNode).to.have.property('path');
expect(secondNode.path.length).to.equal(2);
expect(secondNode.path[0]).to.have.property('value', 'system');
expect(secondNode.path[1]).to.have.property('value', 'demo-stack-mysql-01');
expect(secondNode).to.have.property('metric');
expect(secondNode.metric).to.eql({
name: 'cpu',
value: 0.003666666666666667,
avg: 0.00809090909090909,
max: 0.057833333333333334,
__typename: 'InfraSnapshotNodeMetric',
});
}
});
});
});
});
};