Resolver component w/ sample data (#53619)

Resolver is a map. It shows processes that ran on a computer. The processes are drawn as nodes and lines connect processes with their parents.

Resolver is not yet implemented in Kibana. This PR adds a 'map' type UX. The user can click and drag to pan the map, and zoom using trackpad pinching (or Ctrl and the mouse wheel).

There is no code providing actual data. Sample data is included. The sample data is used to draw a map. The fundamental info needed is:

- process names
- the parent of each process

With this information we can lay out the processes topologically. The sample data isn't yet in a realistic format; we'll be fixing that soon.

Related issue: elastic/endpoint-app-team#30
This commit is contained in:
Robert Austin 2020-01-14 14:56:16 -05:00 committed by GitHub
parent c622a2ffa2
commit 387da985ee
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
44 changed files with 4862 additions and 47 deletions

View file

@ -0,0 +1,14 @@
{
"author": "Elastic",
"name": "endpoint",
"version": "0.0.0",
"private": true,
"license": "Elastic-License",
"scripts": {},
"dependencies": {
"react-redux": "^7.1.0"
},
"devDependencies": {
"@types/react-redux": "^7.1.0"
}
}

View file

@ -0,0 +1,9 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { CameraAction } from './store/camera';
import { DataAction } from './store/data';
/**
 * Union of every action the Resolver store can receive: camera actions
 * (pan / zoom / raster-size) plus the data actions.
 */
export type ResolverAction = CameraAction | DataAction;

View file

@ -4,31 +4,32 @@
* you may not use this file except in compliance with the Elastic License.
*/
import {
EmbeddableInput,
IContainer,
Embeddable,
} from '../../../../../../src/plugins/embeddable/public';
import ReactDOM from 'react-dom';
import React from 'react';
import { AppRoot } from './view';
import { storeFactory } from './store';
import { Embeddable } from '../../../../../../src/plugins/embeddable/public';
// NOTE(review): this view interleaves lines removed and added by a diff — the
// old `EmbeddableInput`-based constructor and the placeholder `innerHTML`
// assignment appear alongside the new React-based render. Verify against the
// actual file before relying on this text.
export class ResolverEmbeddable extends Embeddable {
public readonly type = 'resolver';
constructor(initialInput: EmbeddableInput, parent?: IContainer) {
super(
// Input state is irrelevant to this embeddable, just pass it along.
initialInput,
// Initial output state - this embeddable does not do anything with output, so just
// pass along an empty object.
{},
// Optional parent component, this embeddable can optionally be rendered inside a container.
parent
);
}
// Tracks the last DOM node rendered into, so `render`/`destroy` can unmount the React tree there.
private lastRenderTarget?: Element;
public render(node: HTMLElement) {
node.innerHTML = '<div data-test-subj="resolverEmbeddable">Welcome from Resolver</div>';
// If we previously rendered into another node, unmount that React tree first.
if (this.lastRenderTarget !== undefined) {
ReactDOM.unmountComponentAtNode(this.lastRenderTarget);
}
this.lastRenderTarget = node;
// Each embeddable instance gets its own Redux store.
const { store } = storeFactory();
ReactDOM.render(<AppRoot store={store} />, node);
}
public reload(): void {
throw new Error('Method not implemented.');
}
public destroy(): void {
// Unmount the React tree to release event listeners and avoid leaks.
if (this.lastRenderTarget !== undefined) {
ReactDOM.unmountComponentAtNode(this.lastRenderTarget);
}
}
}

View file

@ -5,12 +5,12 @@
*/
import { i18n } from '@kbn/i18n';
import { ResolverEmbeddable } from './';
import {
EmbeddableFactory,
EmbeddableInput,
IContainer,
EmbeddableInput,
} from '../../../../../../src/plugins/embeddable/public';
import { ResolverEmbeddable } from './embeddable';
export class ResolverEmbeddableFactory extends EmbeddableFactory {
public readonly type = 'resolver';
@ -20,7 +20,7 @@ export class ResolverEmbeddableFactory extends EmbeddableFactory {
}
public async create(initialInput: EmbeddableInput, parent?: IContainer) {
return new ResolverEmbeddable(initialInput, parent);
return new ResolverEmbeddable(initialInput, {}, parent);
}
public getDisplayName() {

View file

@ -0,0 +1,12 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
/**
 * Limit `value` to the inclusive range [`minimum`, `maximum`].
 *
 * Values below `minimum` become `minimum`, values above `maximum` become
 * `maximum`, and anything in between is returned unchanged.
 */
export function clamp(value: number, minimum: number, maximum: number) {
  // Cap from above first, then from below — same evaluation order as
  // Math.max(Math.min(value, maximum), minimum).
  const cappedAbove = Math.min(value, maximum);
  return Math.max(cappedAbove, minimum);
}

View file

@ -0,0 +1,21 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { multiply } from './matrix3';
describe('matrix3', () => {
it('can multiply two matrix3s', () => {
expect(multiply([1, 2, 3, 4, 5, 6, 7, 8, 9], [10, 11, 12, 13, 14, 15, 16, 17, 18])).toEqual([
84,
90,
96,
201,
216,
231,
318,
342,
366,
]);
});
});

View file

@ -0,0 +1,56 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { Matrix3 } from '../types';
/**
 * Return a new matrix which is the product of the first and second matrix.
 *
 * Matrices are stored flat in row-major order: [m11, m12, m13, m21, ...].
 */
export function multiply(
  [x11, x12, x13, x21, x22, x23, x31, x32, x33]: Matrix3,
  [y11, y12, y13, y21, y22, y23, y31, y32, y33]: Matrix3
): Matrix3 {
  // Each result cell is the dot product of a row of the first matrix with a
  // column of the second.
  // prettier-ignore
  return [
    x11 * y11 + x12 * y21 + x13 * y31, x11 * y12 + x12 * y22 + x13 * y32, x11 * y13 + x12 * y23 + x13 * y33,
    x21 * y11 + x22 * y21 + x23 * y31, x21 * y12 + x22 * y22 + x23 * y32, x21 * y13 + x22 * y23 + x23 * y33,
    x31 * y11 + x32 * y21 + x33 * y31, x31 * y12 + x32 * y22 + x33 * y32, x31 * y13 + x32 * y23 + x33 * y33,
  ];
}
/**
 * Return a new matrix which is the sum of the two passed in.
 */
export function add(
  [x11, x12, x13, x21, x22, x23, x31, x32, x33]: Matrix3,
  [y11, y12, y13, y21, y22, y23, y31, y32, y33]: Matrix3
): Matrix3 {
  // Element-wise sum, row-major layout.
  // prettier-ignore
  return [
    x11 + y11, x12 + y12, x13 + y13,
    x21 + y21, x22 + y22, x23 + y23,
    x31 + y31, x32 + y32, x33 + y33,
  ];
}

View file

@ -0,0 +1,14 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { applyMatrix3 } from './vector2';
import { scalingTransformation } from './transformation';
describe('transforms', () => {
  it('applying a scale matrix to a vector2 can invert the y value', () => {
    // Scaling the y axis by -1 mirrors a vector across the x axis.
    const mirrorAcrossXAxis = scalingTransformation([1, -1]);
    expect(applyMatrix3([1, 2], mirrorAcrossXAxis)).toEqual([1, -2]);
  });
});

View file

@ -0,0 +1,73 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { Matrix3, Vector2 } from '../types';
/**
 * The inverse of `orthographicProjection`.
 *
 * Maps points in the normalized (-1, 1) range back into the clipping plane
 * defined by `top`, `right`, `bottom`, and `left`.
 */
export function inverseOrthographicProjection(
  top: number,
  right: number,
  bottom: number,
  left: number
): Matrix3 {
  const xScale = (right - left) / 2;
  const yScale = (top - bottom) / 2;
  const xOffset = (right + left) / (right - left);
  const yOffset = (top + bottom) / (top - bottom);
  // prettier-ignore
  return [
    xScale, 0,      xOffset,
    0,      yScale, yOffset,
    0,      0,      0,
  ];
}
/**
 * Adjust x, y to be bounded, in scale, of a clipping plane defined by top, right, bottom, left.
 *
 * See explanation:
 * https://www.scratchapixel.com/lessons/3d-basic-rendering/perspective-and-orthographic-projection-matrix
 * https://en.wikipedia.org/wiki/Orthographic_projection
 */
export function orthographicProjection(
  top: number,
  right: number,
  bottom: number,
  left: number
): Matrix3 {
  const width = right - left;
  const height = top - bottom;
  // Scale each axis into the ndc (-1, 1) range and translate so the center of
  // the clipping plane lands on the origin.
  // prettier-ignore
  return [
    2 / width, 0,          -((right + left) / width),
    0,         2 / height, -((top + bottom) / height),
    0,         0,          0,
  ];
}
/**
 * Returns a 2D transformation matrix that when applied to a vector will scale the vector by `x` and `y` in their respective axises.
 * See https://en.wikipedia.org/wiki/Scaling_(geometry)#Matrix_representation
 */
export function scalingTransformation([x, y]: Vector2): Matrix3 {
  // NOTE(review): the bottom-right entry is 0 rather than the usual 1 for a
  // homogeneous transform. `applyMatrix3` ignores the third row, so vector
  // transforms are unaffected — confirm before composing this via `multiply`.
  // prettier-ignore
  return [
    x, 0, 0,
    0, y, 0,
    0, 0, 0,
  ];
}
/**
 * Returns a 2D transformation matrix that when applied to a vector will translate by `x` and `y` in their respective axises.
 * See https://en.wikipedia.org/wiki/Translation_(geometry)#Matrix_representation
 */
export function translationTransformation([offsetX, offsetY]: Vector2): Matrix3 {
  // Standard homogeneous translation: identity with the offsets in the third column.
  // prettier-ignore
  return [
    1, 0, offsetX,
    0, 1, offsetY,
    0, 0, 1,
  ];
}

View file

@ -0,0 +1,35 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
/**
 * Sequences a tree, yielding children returned by the `children` function. Sequencing is done in 'depth first preorder' fashion. See https://en.wikipedia.org/wiki/Tree_traversal#Pre-order_(NLR)
 */
export function* depthFirstPreorder<T>(root: T, children: (parent: T) => T[]): Iterable<T> {
  // A work list treated as a stack: children are prepended so the left-most
  // subtree is fully explored before its siblings.
  const pending: T[] = [root];
  while (pending.length > 0) {
    const node = pending.shift();
    if (node === undefined) {
      continue;
    }
    pending.unshift(...(children(node) || []));
    yield node;
  }
}
/**
 * Sequences a tree, yielding children returned by the `children` function. Sequencing is done in 'level order' fashion.
 */
export function* levelOrder<T>(root: T, children: (parent: T) => T[]): Iterable<T> {
  let level: T[] = [root];
  while (level.length !== 0) {
    // Collect the next level while yielding the current one. (The previous
    // version also reassigned `nextLevel = []` after handing it off to
    // `level` — dead code, removed here.)
    const nextLevel: T[] = [];
    for (const node of level) {
      yield node;
      nextLevel.push(...(children(node) || []));
    }
    level = nextLevel;
  }
}

View file

@ -0,0 +1,52 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { Vector2, Matrix3 } from '../types';
/**
 * Returns a vector which is the sum of `a` and `b`.
 */
export function add(a: Vector2, b: Vector2): Vector2 {
  const [ax, ay] = a;
  const [bx, by] = b;
  return [ax + bx, ay + by];
}
/**
 * Returns a vector which is the difference of `a` and `b`.
 */
export function subtract(a: Vector2, b: Vector2): Vector2 {
  const [ax, ay] = a;
  const [bx, by] = b;
  return [ax - bx, ay - by];
}
/**
 * Returns a vector which is the quotient of `a` and `b` (element-wise).
 */
export function divide(a: Vector2, b: Vector2): Vector2 {
  const [ax, ay] = a;
  const [bx, by] = b;
  return [ax / bx, ay / by];
}
/**
 * Returns a vector which is the result of applying a 2D transformation matrix to the provided vector.
 * Only the first two rows of the (row-major) matrix are used; the third row is ignored.
 */
export function applyMatrix3(vector: Vector2, matrix: Matrix3): Vector2 {
  const [x, y] = vector;
  const [m11, m12, m13, m21, m22, m23] = matrix;
  return [m11 * x + m12 * y + m13, m21 * x + m22 * y + m23];
}
/**
 * Returns the distance between two vectors.
 *
 * Uses `Math.hypot`, which computes the same Euclidean distance as
 * `sqrt(dx**2 + dy**2)` while avoiding intermediate overflow/underflow when
 * the components are very large or very small.
 */
export function distance(a: Vector2, b: Vector2) {
  const [x1, y1] = a;
  const [x2, y2] = b;
  return Math.hypot(x2 - x1, y2 - y1);
}
/**
 * Returns the angle between two vectors
 */
export function angle(a: Vector2, b: Vector2) {
  const [fromX, fromY] = a;
  const [toX, toY] = b;
  // Angle of the vector pointing from `a` to `b`, measured from the positive x axis.
  return Math.atan2(toY - fromY, toX - fromX);
}

View file

@ -0,0 +1,88 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { uniquePidForProcess, uniqueParentPidForProcess } from './process_event';
import { IndexedProcessTree, ProcessEvent } from '../types';
import { levelOrder as baseLevelOrder } from '../lib/tree_sequencers';
/**
 * Create a new IndexedProcessTree from an array of ProcessEvents.
 * Builds two indices in a single pass: pid -> event, and parent pid -> child events.
 */
export function factory(processes: ProcessEvent[]): IndexedProcessTree {
  const idToChildren = new Map<number | undefined, ProcessEvent[]>();
  const idToProcess = new Map<number, ProcessEvent>();
  for (const process of processes) {
    idToProcess.set(uniquePidForProcess(process), process);
    const parentPid = uniqueParentPidForProcess(process);
    const siblings = idToChildren.get(parentPid);
    if (siblings === undefined) {
      idToChildren.set(parentPid, [process]);
    } else {
      siblings.push(process);
    }
  }
  return { idToChildren, idToProcess };
}
/**
 * Returns an array with any children `ProcessEvent`s of the passed in `process`.
 * Returns an empty array when the process has no indexed children.
 */
export function children(tree: IndexedProcessTree, process: ProcessEvent): ProcessEvent[] {
  const matches = tree.idToChildren.get(uniquePidForProcess(process));
  if (matches === undefined) {
    return [];
  }
  return matches;
}
/**
 * Returns the parent ProcessEvent, if any, for the passed in `childProcess`.
 */
export function parent(
  tree: IndexedProcessTree,
  childProcess: ProcessEvent
): ProcessEvent | undefined {
  const parentPid = uniqueParentPidForProcess(childProcess);
  // No parent pid means no parent; otherwise look the parent up by pid.
  return parentPid === undefined ? undefined : tree.idToProcess.get(parentPid);
}
/**
 * Number of processes in the tree.
 */
export function size(tree: IndexedProcessTree) {
  const { idToProcess } = tree;
  return idToProcess.size;
}
/**
 * Return the root process, or `null` if the tree is empty.
 */
export function root(tree: IndexedProcessTree) {
  if (size(tree) === 0) {
    return null;
  }
  // Start from an arbitrary process and walk up the parent chain. `parent` is
  // called once per step (the previous version called it twice per iteration
  // and needed a non-null assertion).
  // NOTE(review): assumes parent links are acyclic — a cycle would loop forever.
  let current: ProcessEvent = tree.idToProcess.values().next().value;
  let ancestor = parent(tree, current);
  while (ancestor !== undefined) {
    current = ancestor;
    ancestor = parent(tree, current);
  }
  return current;
}
/**
* Yield processes in level order
*/
export function* levelOrder(tree: IndexedProcessTree) {
const rootNode = root(tree);
if (rootNode !== null) {
yield* baseLevelOrder(rootNode, children.bind(null, tree));
}
}

View file

@ -0,0 +1,26 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { eventType } from './process_event';
import { ProcessEvent } from '../types';
import { mockProcessEvent } from './process_event_test_helpers';
describe('process event', () => {
  describe('eventType', () => {
    // Rebuilt before every test so per-test mutations don't leak between cases.
    let event: ProcessEvent;
    beforeEach(() => {
      event = mockProcessEvent({
        data_buffer: { node_id: 1, event_type_full: 'process_event' },
      });
    });
    it("returns the right value when the subType is 'creation_event'", () => {
      event.data_buffer.event_subtype_full = 'creation_event';
      expect(eventType(event)).toEqual('processCreated');
    });
  });
});

View file

@ -0,0 +1,53 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { ProcessEvent } from '../types';
/**
 * Returns true if the process's eventType is either 'processCreated' or 'processRan'.
 * Resolver will only render 'graphable' process events.
 */
export function isGraphableProcess(event: ProcessEvent) {
  const type = eventType(event);
  return type === 'processCreated' || type === 'processRan';
}
/**
 * Returns a custom event type for a process event based on the event's metadata.
 *
 * 'process_event' types are further classified by their sub type; 'alert_event'
 * types map to 'processCausedAlert'; anything else is 'unknownEvent'.
 */
export function eventType(event: ProcessEvent) {
  const { event_type_full: type, event_subtype_full: subType } = event.data_buffer;
  if (type === 'process_event') {
    switch (subType) {
      case 'creation_event':
      case 'fork_event':
      case 'exec_event':
        return 'processCreated';
      case 'already_running':
        return 'processRan';
      case 'termination_event':
        return 'processTerminated';
      default:
        return 'unknownProcessEvent';
    }
  }
  return type === 'alert_event' ? 'processCausedAlert' : 'unknownEvent';
}
/**
 * Returns the process event's pid
 */
export function uniquePidForProcess(event: ProcessEvent) {
  const { node_id: nodeId } = event.data_buffer;
  return nodeId;
}
/**
 * Returns the process event's parent pid
 */
export function uniqueParentPidForProcess(event: ProcessEvent) {
  const { source_id: sourceId } = event.data_buffer;
  return sourceId;
}

View file

@ -0,0 +1,35 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { ProcessEvent } from '../types';
// Recursively makes every property of T optional.
type DeepPartial<T> = {
  [K in keyof T]?: DeepPartial<T[K]>;
};
/**
 * Creates a mock process event given the 'parts' argument, which can
 * include all or some process event fields as determined by the ProcessEvent type.
 * The only field that must be provided is the event's 'node_id' field.
 * The other fields are populated by the function unless provided in 'parts'
 */
export function mockProcessEvent(
  parts: {
    data_buffer: { node_id: ProcessEvent['data_buffer']['node_id'] };
  } & DeepPartial<ProcessEvent>
): ProcessEvent {
  const dataBuffer = parts.data_buffer;
  // Defaults first, then `parts`, so caller-provided fields always win.
  const mocked: ProcessEvent = {
    event_timestamp: 1,
    event_type: 1,
    machine_id: '',
    ...parts,
    data_buffer: {
      event_subtype_full: 'creation_event',
      event_type_full: 'process_event',
      process_name: '',
      process_path: '',
      ...dataBuffer,
    },
  };
  return mocked;
}

View file

@ -0,0 +1,74 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { Vector2 } from '../../types';
/**
 * Dispatched when the scaling factors should be set directly, e.g. by a
 * 'reset zoom' control or a slider with exact scale values.
 */
interface UserScaled {
  readonly type: 'userScaled';
  /**
   * A vector whose `x` and `y` component will be the new scaling factors for the projection.
   */
  readonly payload: Vector2;
}
interface UserZoomed {
  readonly type: 'userZoomed';
  /**
   * A value to zoom in by. Should be a fraction of `1`. For a `'wheel'` event when `event.deltaMode` is `'pixel'`, pass `event.deltaY / -renderHeight` where `renderHeight` is the height of the Resolver element in pixels.
   */
  // `readonly` added for consistency with the other action payloads.
  readonly payload: number;
}
interface UserSetRasterSize {
  readonly type: 'userSetRasterSize';
  /**
   * The dimensions of the Resolver component in pixels. The Resolver component should not be scrollable itself.
   */
  readonly payload: Vector2;
}
/**
 * This is currently only used in tests. The 'back to center' button will use this action, and more tests around its behavior will need to be added.
 */
interface UserSetPositionOfCamera {
  readonly type: 'userSetPositionOfCamera';
  /**
   * The world transform of the camera
   */
  readonly payload: Vector2;
}
interface UserStartedPanning {
  readonly type: 'userStartedPanning';
  /**
   * A vector in screen coordinates (each unit is a pixel and the Y axis increases towards the bottom of the screen)
   * relative to the Resolver component.
   * Represents a starting position during panning for a pointing device.
   */
  readonly payload: Vector2;
}
interface UserStoppedPanning {
  readonly type: 'userStoppedPanning';
}
interface UserMovedPointer {
  readonly type: 'userMovedPointer';
  /**
   * A vector in screen coordinates relative to the Resolver component.
   * The payload should contain clientX and clientY minus the client position of the Resolver component.
   */
  readonly payload: Vector2;
}
/**
 * Discriminated union (on `type`) of every action the camera store handles.
 */
export type CameraAction =
  | UserScaled
  | UserSetRasterSize
  | UserSetPositionOfCamera
  | UserStartedPanning
  | UserStoppedPanning
  | UserZoomed
  | UserMovedPointer;

View file

@ -0,0 +1,22 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
/**
* The 'camera' in Resolver models the visible area of the Resolver map. Resolver users
* can click and drag the Resolver element to pan the map. They can pinch the trackpad
* or use Ctrl-MouseWheel to _zoom_, which changes the scale.
*
* The camera considers the size of Resolver in pixels, and it considers any panning that
* has been done, and it considers the scale. With this information it maps points on
* the screen to points in Resolver's 'world'. Entities that are mapped in Resolver
* are positioned in these unitless 'world' coordinates, and where they show up (if at all)
* on the screen is determined by the camera.
*
* In the future, we may cull entities before rendering them to the DOM. Entities that
* would not be in the camera's viewport would be ignored.
*/
export { cameraReducer } from './reducer';
export { CameraAction } from './action';

View file

@ -0,0 +1,109 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { Store, createStore } from 'redux';
import { CameraAction } from './action';
import { CameraState } from '../../types';
import { cameraReducer } from './reducer';
import { inverseProjectionMatrix } from './selectors';
import { applyMatrix3 } from '../../lib/vector2';
// Verifies that `inverseProjectionMatrix` maps raster (pixel) coordinates back
// to world coordinates, across raster resizes, zooming, and panning. Nested
// `describe`s build up state by dispatching actions in `beforeEach`.
describe('inverseProjectionMatrix', () => {
let store: Store<CameraState, CameraAction>;
let compare: (worldPosition: [number, number], expectedRasterPosition: [number, number]) => void;
beforeEach(() => {
store = createStore(cameraReducer, undefined);
// NOTE(review): the declared type above names its parameters world -> raster,
// but the implementation below treats them as raster -> world; consider
// renaming the declared parameters to match.
compare = (rasterPosition: [number, number], expectedWorldPosition: [number, number]) => {
const [worldX, worldY] = applyMatrix3(
rasterPosition,
inverseProjectionMatrix(store.getState())
);
expect(worldX).toBeCloseTo(expectedWorldPosition[0]);
expect(worldY).toBeCloseTo(expectedWorldPosition[1]);
};
});
describe('when the raster size is 300 x 200 pixels', () => {
beforeEach(() => {
const action: CameraAction = { type: 'userSetRasterSize', payload: [300, 200] };
store.dispatch(action);
});
it('should convert 150,100 in raster space to 0,0 (center) in world space', () => {
compare([150, 100], [0, 0]);
});
it('should convert 150,0 in raster space to 0,100 (top) in world space', () => {
compare([150, 0], [0, 100]);
});
it('should convert 300,0 in raster space to 150,100 (top right) in world space', () => {
compare([300, 0], [150, 100]);
});
it('should convert 300,100 in raster space to 150,0 (right) in world space', () => {
compare([300, 100], [150, 0]);
});
it('should convert 300,200 in raster space to 150,-100 (right bottom) in world space', () => {
compare([300, 200], [150, -100]);
});
it('should convert 150,200 in raster space to 0,-100 (bottom) in world space', () => {
compare([150, 200], [0, -100]);
});
it('should convert 0,200 in raster space to -150,-100 (bottom left) in world space', () => {
compare([0, 200], [-150, -100]);
});
it('should convert 0,100 in raster space to -150,0 (left) in world space', () => {
compare([0, 100], [-150, 0]);
});
it('should convert 0,0 in raster space to -150,100 (top left) in world space', () => {
compare([0, 0], [-150, 100]);
});
describe('when the user has zoomed to 0.5', () => {
beforeEach(() => {
const action: CameraAction = { type: 'userScaled', payload: [0.5, 0.5] };
store.dispatch(action);
});
it('should convert 150, 100 (center) to 0, 0 (center) in world space', () => {
compare([150, 100], [0, 0]);
});
});
describe('when the user has panned to the right and up by 50', () => {
beforeEach(() => {
const action: CameraAction = { type: 'userSetPositionOfCamera', payload: [-50, -50] };
store.dispatch(action);
});
it('should convert 100,150 in raster space to 0,0 (center) in world space', () => {
compare([100, 150], [0, 0]);
});
it('should convert 150,100 (center) in raster space to 50,50 (right and up a bit) in world space', () => {
compare([150, 100], [50, 50]);
});
it('should convert 160,210 (center) in raster space to 60,-60 (right and down a bit) in world space', () => {
compare([160, 210], [60, -60]);
});
});
describe('when the user has panned to the right by 350 and up by 250', () => {
beforeEach(() => {
const action: CameraAction = { type: 'userSetPositionOfCamera', payload: [-350, -250] };
store.dispatch(action);
});
describe('when the user has scaled to 2', () => {
// the viewport will only cover half, or 150x100 instead of 300x200
beforeEach(() => {
const action: CameraAction = { type: 'userScaled', payload: [2, 2] };
store.dispatch(action);
});
// we expect the viewport to be
// minX = 350 - (150/2) = 275
// maxX = 350 + (150/2) = 425
// minY = 250 - (100/2) = 200
// maxY = 250 + (100/2) = 300
it('should convert 150,100 (center) in raster space to 350,250 in world space', () => {
compare([150, 100], [350, 250]);
});
it('should convert 0,0 (top left) in raster space to 275,300 in world space', () => {
compare([0, 0], [275, 300]);
});
});
});
});
});

View file

@ -0,0 +1,61 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { Store, createStore } from 'redux';
import { cameraReducer } from './reducer';
import { CameraState, Vector2 } from '../../types';
import { CameraAction } from './action';
import { translation } from './selectors';
// Verifies the camera's pan lifecycle: `userStartedPanning` anchors a pointer
// position, `userMovedPointer` adjusts the translation, and `userStoppedPanning`
// commits it.
describe('panning interaction', () => {
let store: Store<CameraState, CameraAction>;
let translationShouldBeCloseTo: (expectedTranslation: Vector2) => void;
beforeEach(() => {
store = createStore(cameraReducer, undefined);
// NOTE(review): the expected value is passed to `expect(...)` and the actual
// value to `toBeCloseTo(...)` — reversed from the usual jest convention.
// Harmless for the comparison, but failure messages will read backwards.
translationShouldBeCloseTo = expectedTranslation => {
const actualTranslation = translation(store.getState());
expect(expectedTranslation[0]).toBeCloseTo(actualTranslation[0]);
expect(expectedTranslation[1]).toBeCloseTo(actualTranslation[1]);
};
});
describe('when the raster size is 300 x 200 pixels', () => {
beforeEach(() => {
const action: CameraAction = { type: 'userSetRasterSize', payload: [300, 200] };
store.dispatch(action);
});
it('should have a translation of 0,0', () => {
translationShouldBeCloseTo([0, 0]);
});
describe('when the user has started panning', () => {
beforeEach(() => {
const action: CameraAction = { type: 'userStartedPanning', payload: [100, 100] };
store.dispatch(action);
});
it('should have a translation of 0,0', () => {
translationShouldBeCloseTo([0, 0]);
});
describe('when the user continues to pan 50px up and to the right', () => {
beforeEach(() => {
const action: CameraAction = { type: 'userMovedPointer', payload: [150, 50] };
store.dispatch(action);
});
it('should have a translation of 50,50', () => {
translationShouldBeCloseTo([50, 50]);
});
describe('when the user then stops panning', () => {
beforeEach(() => {
const action: CameraAction = { type: 'userStoppedPanning' };
store.dispatch(action);
});
it('should have a translation of 50,50', () => {
translationShouldBeCloseTo([50, 50]);
});
});
});
});
});
});

View file

@ -0,0 +1,112 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { Store, createStore } from 'redux';
import { CameraAction } from './action';
import { CameraState } from '../../types';
import { cameraReducer } from './reducer';
import { projectionMatrix } from './selectors';
import { applyMatrix3 } from '../../lib/vector2';
// Verifies that `projectionMatrix` maps world coordinates to raster (pixel)
// coordinates — the inverse direction of the `inverseProjectionMatrix` tests —
// across raster resizes, zooming, and panning.
describe('projectionMatrix', () => {
let store: Store<CameraState, CameraAction>;
let compare: (worldPosition: [number, number], expectedRasterPosition: [number, number]) => void;
beforeEach(() => {
store = createStore(cameraReducer, undefined);
compare = (worldPosition: [number, number], expectedRasterPosition: [number, number]) => {
const [rasterX, rasterY] = applyMatrix3(worldPosition, projectionMatrix(store.getState()));
expect(rasterX).toBeCloseTo(expectedRasterPosition[0]);
expect(rasterY).toBeCloseTo(expectedRasterPosition[1]);
};
});
describe('when the raster size is 300 x 200 pixels', () => {
beforeEach(() => {
const action: CameraAction = { type: 'userSetRasterSize', payload: [300, 200] };
store.dispatch(action);
});
it('should convert 0,0 (center) in world space to 150,100 in raster space', () => {
compare([0, 0], [150, 100]);
});
it('should convert 0,100 (top) in world space to 150,0 in raster space', () => {
compare([0, 100], [150, 0]);
});
it('should convert 150,100 (top right) in world space to 300,0 in raster space', () => {
compare([150, 100], [300, 0]);
});
it('should convert 150,0 (right) in world space to 300,100 in raster space', () => {
compare([150, 0], [300, 100]);
});
it('should convert 150,-100 (right bottom) in world space to 300,200 in raster space', () => {
compare([150, -100], [300, 200]);
});
it('should convert 0,-100 (bottom) in world space to 150,200 in raster space', () => {
compare([0, -100], [150, 200]);
});
it('should convert -150,-100 (bottom left) in world space to 0,200 in raster space', () => {
compare([-150, -100], [0, 200]);
});
it('should convert -150,0 (left) in world space to 0,100 in raster space', () => {
compare([-150, 0], [0, 100]);
});
it('should convert -150,100 (top left) in world space to 0,0 in raster space', () => {
compare([-150, 100], [0, 0]);
});
describe('when the user has zoomed to 0.5', () => {
beforeEach(() => {
const action: CameraAction = { type: 'userScaled', payload: [0.5, 0.5] };
store.dispatch(action);
});
it('should convert 0, 0 (center) in world space to 150, 100 (center)', () => {
compare([0, 0], [150, 100]);
});
});
describe('when the user has panned to the right and up by 50', () => {
beforeEach(() => {
const action: CameraAction = { type: 'userSetPositionOfCamera', payload: [-50, -50] };
store.dispatch(action);
});
it('should convert 0,0 (center) in world space to 100,150 in raster space', () => {
compare([0, 0], [100, 150]);
});
it('should convert 50,50 (right and up a bit) in world space to 150,100 (center) in raster space', () => {
compare([50, 50], [150, 100]);
});
it('should convert 60,-60 (right and down a bit) in world space to 160,210 (center) in raster space', () => {
compare([60, -60], [160, 210]);
});
});
describe('when the user has panned to the right by 350 and up by 250', () => {
beforeEach(() => {
const action: CameraAction = {
type: 'userSetPositionOfCamera',
payload: [-350, -250],
};
store.dispatch(action);
});
it('should convert 350,250 in world space to 150,100 (center) in raster space', () => {
compare([350, 250], [150, 100]);
});
describe('when the user has scaled to 2', () => {
// the viewport will only cover half, or 150x100 instead of 300x200
beforeEach(() => {
const action: CameraAction = { type: 'userScaled', payload: [2, 2] };
store.dispatch(action);
});
// we expect the viewport to be
// minX = 350 - (150/2) = 275
// maxX = 350 + (150/2) = 425
// minY = 250 - (100/2) = 200
// maxY = 250 + (100/2) = 300
it('should convert 350,250 in world space to 150,100 (center) in raster space', () => {
compare([350, 250], [150, 100]);
});
it('should convert 275,300 in world space to 0,0 (top left) in raster space', () => {
compare([275, 300], [0, 0]);
});
});
});
});
});

View file

@ -0,0 +1,159 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { Reducer } from 'redux';
import { applyMatrix3, subtract } from '../../lib/vector2';
import { userIsPanning, translation, projectionMatrix, inverseProjectionMatrix } from './selectors';
import { clamp } from '../../lib/math';
import { CameraState, ResolverAction } from '../../types';
/**
 * The camera's starting state: unscaled, zero-sized raster, centered on the
 * world origin, and no pointer position recorded yet.
 */
function initialState(): CameraState {
  const initialScaling = [1, 1] as const;
  const initialRasterSize = [0, 0] as const;
  const initialTranslation = [0, 0] as const;
  return {
    scaling: initialScaling,
    rasterSize: initialRasterSize,
    translationNotCountingCurrentPanning: initialTranslation,
    latestFocusedWorldCoordinates: null,
  };
}
/**
 * The minimum allowed value for the camera scale. This is the least scale that we will ever render something at.
 */
const minimumScale = 0.1;
/**
 * The maximum allowed value for the camera scale. This is the greatest scale that we will ever render something at.
 */
const maximumScale = 6;
/**
 * Reducer for the Resolver camera. Tracks the zoom (scaling), the raster size, the committed
 * camera translation, any in-progress panning gesture, and the last world point under the pointer.
 */
export const cameraReducer: Reducer<CameraState, ResolverAction> = (
  state = initialState(),
  action
) => {
  switch (action.type) {
    case 'userScaled': {
      /**
       * Handle the scale being explicitly set, for example by a 'reset zoom' feature, or by a
       * range slider with exact scale values.
       */
      const [scaleX, scaleY] = action.payload;
      return {
        ...state,
        scaling: [
          clamp(scaleX, minimumScale, maximumScale),
          clamp(scaleY, minimumScale, maximumScale),
        ],
      };
    }
    case 'userZoomed': {
      /**
       * Zooming changes the scale. Clamp the result so we never render at an unreasonable
       * scale (e.g. infinity or a negative value.)
       */
      const scaledState: CameraState = {
        ...state,
        scaling: [
          clamp(state.scaling[0] + action.payload, minimumScale, maximumScale),
          clamp(state.scaling[1] + action.payload, minimumScale, maximumScale),
        ],
      };
      /**
       * Changing only the scale would always zoom in (or out) around the center of the map, losing
       * the user's context. If we know the world coordinates most recently under the pointer, adjust
       * the translation so that the same world point stays under the pointer after the scale change.
       *
       * Try it: hover a node away from the center and zoom with CTRL+mousewheel (or a trackpad
       * pinch) — the node stays under the cursor while the rest of the map moves.
       */
      if (state.latestFocusedWorldCoordinates === null) {
        return scaledState;
      }
      const focusedPointInRaster = applyMatrix3(
        state.latestFocusedWorldCoordinates,
        projectionMatrix(state)
      );
      const focusedPointInNewWorld = applyMatrix3(
        focusedPointInRaster,
        inverseProjectionMatrix(scaledState)
      );
      // How far the focused point drifted in world space because of the scale change.
      const offset = subtract(focusedPointInNewWorld, state.latestFocusedWorldCoordinates);
      return {
        ...scaledState,
        translationNotCountingCurrentPanning: [
          scaledState.translationNotCountingCurrentPanning[0] + offset[0],
          scaledState.translationNotCountingCurrentPanning[1] + offset[1],
        ],
      };
    }
    case 'userSetPositionOfCamera':
      /**
       * The position of the camera was explicitly set, for example by a 'back to center' feature.
       */
      return {
        ...state,
        translationNotCountingCurrentPanning: action.payload,
      };
    case 'userStartedPanning':
      /**
       * A pan begins (mousedown.) Record the origin so later pointer positions can be compared to it.
       */
      return {
        ...state,
        panning: {
          origin: action.payload,
          currentOffset: action.payload,
        },
      };
    case 'userStoppedPanning':
      /**
       * A pan ended (the user let up on the mouse.) Fold the pan's offset into the committed
       * camera translation and clear the gesture.
       */
      return userIsPanning(state)
        ? {
            ...state,
            translationNotCountingCurrentPanning: translation(state),
            panning: undefined,
          }
        : state;
    case 'userSetRasterSize':
      /**
       * Handle resizes of the Resolver component. We need the size in order to convert between
       * screen and world coordinates.
       */
      return {
        ...state,
        rasterSize: action.payload,
      };
    case 'userMovedPointer': {
      // While a pan is in progress, the pointer's position is the pan's current offset.
      const withPanning = userIsPanning(state)
        ? {
            ...state,
            panning: {
              origin: state.panning ? state.panning.origin : action.payload,
              currentOffset: action.payload,
            },
          }
        : state;
      return {
        ...withPanning,
        /**
         * Keep track of the last world coordinates the user moved over. When the scale of the
         * projection matrix changes we use this to keep the same point under the pointer
         * (see the 'userZoomed' case.)
         */
        latestFocusedWorldCoordinates: applyMatrix3(
          action.payload,
          inverseProjectionMatrix(withPanning)
        ),
      };
    }
    default:
      return state;
  }
};

View file

@ -0,0 +1,183 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { Vector2, CameraState, AABB, Matrix3 } from '../../types';
import { subtract, divide, add, applyMatrix3 } from '../../lib/vector2';
import { multiply, add as addMatrix } from '../../lib/matrix3';
import {
inverseOrthographicProjection,
scalingTransformation,
orthographicProjection,
translationTransformation,
} from '../../lib/transformation';
/**
 * The boundaries used by the orthographic projection: the pixel dimensions of the raster
 * being rendered to, and the world-space extents visible at the current camera scale.
 */
interface ClippingPlanes {
  /** Width, in pixels, of the raster the map is drawn to. */
  renderWidth: number;
  /** Height, in pixels, of the raster the map is drawn to. */
  renderHeight: number;
  /** World-space x of the right edge of the viewport (before camera translation.) */
  clippingPlaneRight: number;
  /** World-space y of the top edge of the viewport (before camera translation.) */
  clippingPlaneTop: number;
  /** Always the negation of `clippingPlaneRight`. */
  clippingPlaneLeft: number;
  /** Always the negation of `clippingPlaneTop`. */
  clippingPlaneBottom: number;
}
/**
 * The viewable area in the Resolver map, in world coordinates.
 */
export function viewableBoundingBox(state: CameraState): AABB {
  const { renderWidth, renderHeight } = clippingPlanes(state);
  const toWorld = inverseProjectionMatrix(state);
  // Screen y grows downward, so the raster's bottom-left corner maps to the world-space
  // minimum and its top-right corner to the world-space maximum.
  const bottomLeft: Vector2 = [0, renderHeight];
  const topRight: Vector2 = [renderWidth, 0];
  return {
    minimum: applyMatrix3(bottomLeft, toWorld),
    maximum: applyMatrix3(topRight, toWorld),
  };
}
/**
 * The 2D clipping planes used for the orthographic projection. See https://en.wikipedia.org/wiki/Orthographic_projection
 */
function clippingPlanes(state: CameraState): ClippingPlanes {
  const [renderWidth, renderHeight] = state.rasterSize;
  const [scaleX, scaleY] = state.scaling;
  // Half the raster divided by the scale: at 2x scale only half as much of the world
  // fits on screen, so the planes are half as far from the center.
  const right = renderWidth / 2 / scaleX;
  const top = renderHeight / 2 / scaleY;
  return {
    renderWidth,
    renderHeight,
    clippingPlaneRight: right,
    clippingPlaneTop: top,
    clippingPlaneLeft: -right,
    clippingPlaneBottom: -top,
  };
}
/**
 * A matrix that when applied to a Vector2 will convert it from world coordinates to screen coordinates.
 * See https://en.wikipedia.org/wiki/Orthographic_projection
 *
 * The matrix is composed from the inside out; the numbered comments below give the order in
 * which each step applies to a point.
 */
export const projectionMatrix: (state: CameraState) => Matrix3 = state => {
  const {
    renderWidth,
    renderHeight,
    clippingPlaneRight,
    clippingPlaneTop,
    clippingPlaneLeft,
    clippingPlaneBottom,
  } = clippingPlanes(state);
  return multiply(
    // 5. convert from 0->2 to 0->rasterWidth (or height)
    scalingTransformation([renderWidth / 2, renderHeight / 2]),
    addMatrix(
      // 4. add one to change range from -1->1 to 0->2
      // (matrix addition only bumps the translation column here, leaving the scale/rotation terms alone)
      [0, 0, 1, 0, 0, 1, 0, 0, 0],
      multiply(
        // 3. invert y since CSS has inverted y
        scalingTransformation([1, -1]),
        multiply(
          // 2. scale to clipping plane
          orthographicProjection(
            clippingPlaneTop,
            clippingPlaneRight,
            clippingPlaneBottom,
            clippingPlaneLeft
          ),
          // 1. adjust for camera
          translationTransformation(translation(state))
        )
      )
    )
  );
};
/**
 * The camera's effective translation. This is `translationNotCountingCurrentPanning` plus, if a
 * pan is in progress, the pan's offset (the difference between the pointer's current position and
 * the position where the pan began.)
 *
 * The committed translation is only updated when panning ends. Recomputing the in-progress offset
 * from the pan's origin each time (instead of accumulating per-move deltas) avoids floating point
 * drift (round-off error.)
 */
export function translation(state: CameraState): Vector2 {
  if (!state.panning) {
    return state.translationNotCountingCurrentPanning;
  }
  const panOffsetOnScreen = subtract(state.panning.currentOffset, state.panning.origin);
  // Convert the screen-space pan offset into world units: divide by the scale, negating `y`
  // because the `.panning` vectors are in screen coordinates and therefore have backwards `y`.
  const panOffsetInWorld = divide(panOffsetOnScreen, [state.scaling[0], -state.scaling[1]]);
  return add(state.translationNotCountingCurrentPanning, panOffsetInWorld);
}
/**
 * A matrix that when applied to a Vector2 converts it from screen coordinates to world coordinates.
 * See https://en.wikipedia.org/wiki/Orthographic_projection
 *
 * This undoes `projectionMatrix`: each numbered step below is the inverse of the corresponding
 * step there, applied in reverse order.
 */
export const inverseProjectionMatrix: (state: CameraState) => Matrix3 = state => {
  const {
    renderWidth,
    renderHeight,
    clippingPlaneRight,
    clippingPlaneTop,
    clippingPlaneLeft,
    clippingPlaneBottom,
  } = clippingPlanes(state);
  /* prettier-ignore */
  const screenToNDC = [
    2 / renderWidth, 0, -1,
    0, 2 / renderHeight, -1,
    0, 0, 0
  ] as const
  const [translationX, translationY] = translation(state);
  return addMatrix(
    // 4. Translate for the 'camera'
    // (matrix addition only affects the translation column of the product below)
    // prettier-ignore
    [
      0, 0, -translationX,
      0, 0, -translationY,
      0, 0, 0
    ] as const,
    multiply(
      // 3. make values in range of clipping planes
      inverseOrthographicProjection(
        clippingPlaneTop,
        clippingPlaneRight,
        clippingPlaneBottom,
        clippingPlaneLeft
      ),
      multiply(
        // 2. Invert Y since CSS has inverted y
        scalingTransformation([1, -1]),
        // 1. convert screen coordinates to NDC
        // e.g. for x-axis, divide by renderWidth then multiply by 2 and subtract by one so the value is in range of -1->1
        screenToNDC
      )
    )
  );
};
/**
 * The scale by which world values are scaled when rendered.
 */
export function scale(state: CameraState): Vector2 {
  return state.scaling;
}
/**
 * Whether or not the user is currently panning the map: true exactly when a
 * `panning` gesture object is present on the state.
 */
export function userIsPanning(state: CameraState): boolean {
  return state.panning !== undefined;
}

View file

@ -0,0 +1,27 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { Store } from 'redux';
import { CameraAction } from './action';
import { CameraState, Vector2 } from '../../types';
type CameraStore = Store<CameraState, CameraAction>;
/**
 * Dispatches a 'userScaled' action carrying the given scale to the store.
 */
export function userScaled(store: CameraStore, scalingValue: [number, number]): void {
  store.dispatch({ type: 'userScaled', payload: scalingValue });
}
/**
 * Used to assert that two Vector2s are close to each other (accounting for round-off errors.)
 */
export function expectVectorsToBeClose(first: Vector2, second: Vector2): void {
  for (const axis of [0, 1] as const) {
    expect(first[axis]).toBeCloseTo(second[axis]);
  }
}

View file

@ -0,0 +1,142 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { CameraAction } from './action';
import { cameraReducer } from './reducer';
import { createStore, Store } from 'redux';
import { CameraState, AABB } from '../../types';
import { viewableBoundingBox, inverseProjectionMatrix } from './selectors';
import { userScaled, expectVectorsToBeClose } from './test_helpers';
import { applyMatrix3 } from '../../lib/vector2';
// Tests for camera zoom behavior: viewport bounds at various scales, and keeping
// the world point under the pointer fixed while zooming.
describe('zooming', () => {
  let store: Store<CameraState, CameraAction>;
  // Builds an `it` title/callback pair asserting the camera's viewable world-space AABB.
  const cameraShouldBeBoundBy = (expectedViewableBoundingBox: AABB): [string, () => void] => {
    return [
      `the camera view should be bound by an AABB with a minimum point of ${expectedViewableBoundingBox.minimum} and a maximum point of ${expectedViewableBoundingBox.maximum}`,
      () => {
        const actual = viewableBoundingBox(store.getState());
        expect(actual.minimum[0]).toBeCloseTo(expectedViewableBoundingBox.minimum[0]);
        expect(actual.minimum[1]).toBeCloseTo(expectedViewableBoundingBox.minimum[1]);
        expect(actual.maximum[0]).toBeCloseTo(expectedViewableBoundingBox.maximum[0]);
        expect(actual.maximum[1]).toBeCloseTo(expectedViewableBoundingBox.maximum[1]);
      },
    ];
  };
  beforeEach(() => {
    store = createStore(cameraReducer, undefined);
  });
  describe('when the raster size is 300 x 200 pixels', () => {
    beforeEach(() => {
      const action: CameraAction = { type: 'userSetRasterSize', payload: [300, 200] };
      store.dispatch(action);
    });
    it(
      ...cameraShouldBeBoundBy({
        minimum: [-150, -100],
        maximum: [150, 100],
      })
    );
    describe('when the user has scaled in to 2x', () => {
      beforeEach(() => {
        userScaled(store, [2, 2]);
      });
      // At 2x scale, only half as much of the world fits in the viewport.
      it(
        ...cameraShouldBeBoundBy({
          minimum: [-75, -50],
          maximum: [75, 50],
        })
      );
    });
    describe('when the user zooms in by 1 zoom unit', () => {
      beforeEach(() => {
        const action: CameraAction = {
          type: 'userZoomed',
          payload: 1,
        };
        store.dispatch(action);
      });
      it(
        ...cameraShouldBeBoundBy({
          minimum: [-75, -50],
          maximum: [75, 50],
        })
      );
    });
    it('the raster position 200, 50 should map to the world position 50, 50', () => {
      expectVectorsToBeClose(applyMatrix3([200, 50], inverseProjectionMatrix(store.getState())), [
        50,
        50,
      ]);
    });
    describe('when the user has moved their mouse to the raster position 200, 50', () => {
      beforeEach(() => {
        const action: CameraAction = {
          type: 'userMovedPointer',
          payload: [200, 50],
        };
        store.dispatch(action);
      });
      it('should have focused the world position 50, 50', () => {
        const coords = store.getState().latestFocusedWorldCoordinates;
        if (coords !== null) {
          expectVectorsToBeClose(coords, [50, 50]);
        } else {
          throw new Error('coords should not have been null');
        }
      });
      describe('when the user zooms in by 0.5 zoom units', () => {
        beforeEach(() => {
          const action: CameraAction = {
            type: 'userZoomed',
            payload: 0.5,
          };
          store.dispatch(action);
        });
        // The focused world point must stay under the pointer across the scale change.
        it('the raster position 200, 50 should map to the world position 50, 50', () => {
          expectVectorsToBeClose(
            applyMatrix3([200, 50], inverseProjectionMatrix(store.getState())),
            [50, 50]
          );
        });
      });
    });
    describe('when the user pans right by 100 pixels', () => {
      beforeEach(() => {
        const action: CameraAction = { type: 'userSetPositionOfCamera', payload: [-100, 0] };
        store.dispatch(action);
      });
      it(
        ...cameraShouldBeBoundBy({
          minimum: [-50, -100],
          maximum: [250, 100],
        })
      );
      it('should be centered on 100, 0', () => {
        const worldCenterPoint = applyMatrix3(
          [150, 100],
          inverseProjectionMatrix(store.getState())
        );
        expect(worldCenterPoint[0]).toBeCloseTo(100);
        expect(worldCenterPoint[1]).toBeCloseTo(0);
      });
      describe('when the user scales to 2x', () => {
        beforeEach(() => {
          userScaled(store, [2, 2]);
        });
        // Explicitly setting the scale should not move the camera's center.
        it('should be centered on 100, 0', () => {
          const worldCenterPoint = applyMatrix3(
            [150, 100],
            inverseProjectionMatrix(store.getState())
          );
          expect(worldCenterPoint[0]).toBeCloseTo(100);
          expect(worldCenterPoint[1]).toBeCloseTo(0);
        });
      });
    });
  });
});

View file

@ -0,0 +1,347 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`resolver graph layout when rendering no nodes renders right 1`] = `
Object {
"edgeLineSegments": Array [],
"processNodePositions": Map {},
}
`;
exports[`resolver graph layout when rendering one node renders right 1`] = `
Object {
"edgeLineSegments": Array [],
"processNodePositions": Map {
Object {
"data_buffer": Object {
"event_subtype_full": "creation_event",
"event_type_full": "process_event",
"node_id": 0,
"process_name": "",
"process_path": "",
},
"event_timestamp": 1,
"event_type": 1,
"machine_id": "",
} => Array [
0,
-0.8164965809277259,
],
},
}
`;
exports[`resolver graph layout when rendering two forks, and one fork has an extra long tine renders right 1`] = `
Object {
"edgeLineSegments": Array [
Array [
Array [
0,
-0.8164965809277259,
],
Array [
35.35533905932738,
-21.228911104120876,
],
],
Array [
Array [
-35.35533905932738,
-62.053740150507174,
],
Array [
106.06601717798213,
19.595917942265423,
],
],
Array [
Array [
-35.35533905932738,
-62.053740150507174,
],
Array [
0,
-82.46615467370032,
],
],
Array [
Array [
106.06601717798213,
19.595917942265423,
],
Array [
141.4213562373095,
-0.8164965809277259,
],
],
Array [
Array [
0,
-82.46615467370032,
],
Array [
35.35533905932738,
-102.87856919689347,
],
],
Array [
Array [
0,
-123.2909837200866,
],
Array [
70.71067811865476,
-82.46615467370032,
],
],
Array [
Array [
0,
-123.2909837200866,
],
Array [
35.35533905932738,
-143.70339824327976,
],
],
Array [
Array [
70.71067811865476,
-82.46615467370032,
],
Array [
106.06601717798213,
-102.87856919689347,
],
],
Array [
Array [
141.4213562373095,
-0.8164965809277259,
],
Array [
176.7766952966369,
-21.22891110412087,
],
],
Array [
Array [
141.4213562373095,
-41.64132562731402,
],
Array [
212.13203435596427,
-0.8164965809277259,
],
],
Array [
Array [
141.4213562373095,
-41.64132562731402,
],
Array [
176.7766952966369,
-62.053740150507174,
],
],
Array [
Array [
212.13203435596427,
-0.8164965809277259,
],
Array [
247.48737341529164,
-21.228911104120883,
],
],
Array [
Array [
247.48737341529164,
-21.228911104120883,
],
Array [
318.1980515339464,
-62.05374015050717,
],
],
],
"processNodePositions": Map {
Object {
"data_buffer": Object {
"event_subtype_full": "creation_event",
"event_type_full": "process_event",
"node_id": 0,
"process_name": "",
"process_path": "",
},
"event_timestamp": 1,
"event_type": 1,
"machine_id": "",
} => Array [
0,
-0.8164965809277259,
],
Object {
"data_buffer": Object {
"event_subtype_full": "already_running",
"event_type_full": "process_event",
"node_id": 1,
"process_name": "",
"process_path": "",
"source_id": 0,
},
"event_timestamp": 1,
"event_type": 1,
"machine_id": "",
} => Array [
0,
-82.46615467370032,
],
Object {
"data_buffer": Object {
"event_subtype_full": "creation_event",
"event_type_full": "process_event",
"node_id": 2,
"process_name": "",
"process_path": "",
"source_id": 0,
},
"event_timestamp": 1,
"event_type": 1,
"machine_id": "",
} => Array [
141.4213562373095,
-0.8164965809277259,
],
Object {
"data_buffer": Object {
"event_subtype_full": "creation_event",
"event_type_full": "process_event",
"node_id": 3,
"process_name": "",
"process_path": "",
"source_id": 1,
},
"event_timestamp": 1,
"event_type": 1,
"machine_id": "",
} => Array [
35.35533905932738,
-143.70339824327976,
],
Object {
"data_buffer": Object {
"event_subtype_full": "creation_event",
"event_type_full": "process_event",
"node_id": 4,
"process_name": "",
"process_path": "",
"source_id": 1,
},
"event_timestamp": 1,
"event_type": 1,
"machine_id": "",
} => Array [
106.06601717798213,
-102.87856919689347,
],
Object {
"data_buffer": Object {
"event_subtype_full": "creation_event",
"event_type_full": "process_event",
"node_id": 5,
"process_name": "",
"process_path": "",
"source_id": 2,
},
"event_timestamp": 1,
"event_type": 1,
"machine_id": "",
} => Array [
176.7766952966369,
-62.053740150507174,
],
Object {
"data_buffer": Object {
"event_subtype_full": "creation_event",
"event_type_full": "process_event",
"node_id": 6,
"process_name": "",
"process_path": "",
"source_id": 2,
},
"event_timestamp": 1,
"event_type": 1,
"machine_id": "",
} => Array [
247.48737341529164,
-21.228911104120883,
],
Object {
"data_buffer": Object {
"event_subtype_full": "creation_event",
"event_type_full": "process_event",
"node_id": 7,
"process_name": "",
"process_path": "",
"source_id": 6,
},
"event_timestamp": 1,
"event_type": 1,
"machine_id": "",
} => Array [
318.1980515339464,
-62.05374015050717,
],
},
}
`;
exports[`resolver graph layout when rendering two nodes, one being the parent of the other renders right 1`] = `
Object {
"edgeLineSegments": Array [
Array [
Array [
0,
-0.8164965809277259,
],
Array [
70.71067811865476,
-41.641325627314025,
],
],
],
"processNodePositions": Map {
Object {
"data_buffer": Object {
"event_subtype_full": "creation_event",
"event_type_full": "process_event",
"node_id": 0,
"process_name": "",
"process_path": "",
},
"event_timestamp": 1,
"event_type": 1,
"machine_id": "",
} => Array [
0,
-0.8164965809277259,
],
Object {
"data_buffer": Object {
"event_subtype_full": "already_running",
"event_type_full": "process_event",
"node_id": 1,
"process_name": "",
"process_path": "",
"source_id": 0,
},
"event_timestamp": 1,
"event_type": 1,
"machine_id": "",
} => Array [
70.71067811865476,
-41.641325627314025,
],
},
}
`;

View file

@ -0,0 +1,20 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { ProcessEvent } from '../../types';
/**
 * Dispatched when the server responds with Resolver data to display. The payload mirrors the
 * server's response envelope; `search_results` holds the process events to lay out.
 */
interface ServerReturnedResolverData {
  readonly type: 'serverReturnedResolverData';
  readonly payload: {
    readonly data: {
      readonly result: {
        readonly search_results: readonly ProcessEvent[];
      };
    };
  };
}
/**
 * All actions handled by the Resolver data reducer.
 */
export type DataAction = ServerReturnedResolverData;

View file

@ -0,0 +1,212 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { Store, createStore } from 'redux';
import { DataAction } from './action';
import { dataReducer } from './reducer';
import { DataState, ProcessEvent } from '../../types';
import { graphableProcesses, processNodePositionsAndEdgeLineSegments } from './selectors';
import { mockProcessEvent } from '../../models/process_event_test_helpers';
/**
 * Tests for the Resolver graph layout: dispatches sample process trees at the data reducer,
 * verifies which processes are considered graphable, and snapshots node positions / edge lines.
 *
 * NOTE: several test descriptions previously contradicted their assertions (they said the list
 * "should only include nothing" while expecting processes); the descriptions are corrected here.
 */
describe('resolver graph layout', () => {
  let processA: ProcessEvent;
  let processB: ProcessEvent;
  let processC: ProcessEvent;
  let processD: ProcessEvent;
  let processE: ProcessEvent;
  let processF: ProcessEvent;
  let processG: ProcessEvent;
  let processH: ProcessEvent;
  let processI: ProcessEvent;
  let store: Store<DataState, DataAction>;
  beforeEach(() => {
    /*
     *          A
     *      ____|____
     *     |         |
     *     B         C
     *  ___|___   ___|___
     * |       | |       |
     * D       E F       G
     *                   |
     *                   H
     *
     */
    processA = mockProcessEvent({
      data_buffer: {
        process_name: '',
        event_type_full: 'process_event',
        event_subtype_full: 'creation_event',
        node_id: 0,
      },
    });
    processB = mockProcessEvent({
      data_buffer: {
        event_type_full: 'process_event',
        event_subtype_full: 'already_running',
        node_id: 1,
        source_id: 0,
      },
    });
    processC = mockProcessEvent({
      data_buffer: {
        event_type_full: 'process_event',
        event_subtype_full: 'creation_event',
        node_id: 2,
        source_id: 0,
      },
    });
    processD = mockProcessEvent({
      data_buffer: {
        event_type_full: 'process_event',
        event_subtype_full: 'creation_event',
        node_id: 3,
        source_id: 1,
      },
    });
    processE = mockProcessEvent({
      data_buffer: {
        event_type_full: 'process_event',
        event_subtype_full: 'creation_event',
        node_id: 4,
        source_id: 1,
      },
    });
    processF = mockProcessEvent({
      data_buffer: {
        event_type_full: 'process_event',
        event_subtype_full: 'creation_event',
        node_id: 5,
        source_id: 2,
      },
    });
    processG = mockProcessEvent({
      data_buffer: {
        event_type_full: 'process_event',
        event_subtype_full: 'creation_event',
        node_id: 6,
        source_id: 2,
      },
    });
    processH = mockProcessEvent({
      data_buffer: {
        event_type_full: 'process_event',
        event_subtype_full: 'creation_event',
        node_id: 7,
        source_id: 6,
      },
    });
    // A termination event: present in the results but not graphable.
    processI = mockProcessEvent({
      data_buffer: {
        event_type_full: 'process_event',
        event_subtype_full: 'termination_event',
        node_id: 8,
        source_id: 0,
      },
    });
    store = createStore(dataReducer, undefined);
  });
  describe('when rendering no nodes', () => {
    beforeEach(() => {
      const payload = {
        data: {
          result: {
            search_results: [],
          },
        },
      };
      const action: DataAction = { type: 'serverReturnedResolverData', payload };
      store.dispatch(action);
    });
    it('the graphableProcesses list should be empty', () => {
      const actual = graphableProcesses(store.getState());
      expect(actual).toEqual([]);
    });
    it('renders right', () => {
      expect(processNodePositionsAndEdgeLineSegments(store.getState())).toMatchSnapshot();
    });
  });
  describe('when rendering one node', () => {
    beforeEach(() => {
      const payload = {
        data: {
          result: {
            search_results: [processA],
          },
        },
      };
      const action: DataAction = { type: 'serverReturnedResolverData', payload };
      store.dispatch(action);
    });
    it('the graphableProcesses list should only include the one process', () => {
      const actual = graphableProcesses(store.getState());
      expect(actual).toEqual([processA]);
    });
    it('renders right', () => {
      expect(processNodePositionsAndEdgeLineSegments(store.getState())).toMatchSnapshot();
    });
  });
  describe('when rendering two nodes, one being the parent of the other', () => {
    beforeEach(() => {
      const payload = {
        data: {
          result: {
            search_results: [processA, processB],
          },
        },
      };
      const action: DataAction = { type: 'serverReturnedResolverData', payload };
      store.dispatch(action);
    });
    it('the graphableProcesses list should include both processes', () => {
      const actual = graphableProcesses(store.getState());
      expect(actual).toEqual([processA, processB]);
    });
    it('renders right', () => {
      expect(processNodePositionsAndEdgeLineSegments(store.getState())).toMatchSnapshot();
    });
  });
  describe('when rendering two forks, and one fork has an extra long tine', () => {
    beforeEach(() => {
      const payload = {
        data: {
          result: {
            search_results: [
              processA,
              processB,
              processC,
              processD,
              processE,
              processF,
              processG,
              processH,
              processI,
            ],
          },
        },
      };
      const action: DataAction = { type: 'serverReturnedResolverData', payload };
      store.dispatch(action);
    });
    it("the graphableProcesses list should only include events with 'processCreated' and 'processRan' eventType", () => {
      const actual = graphableProcesses(store.getState());
      // processI is a termination event, so it is excluded.
      expect(actual).toEqual([
        processA,
        processB,
        processC,
        processD,
        processE,
        processF,
        processG,
        processH,
      ]);
    });
    it('renders right', () => {
      expect(processNodePositionsAndEdgeLineSegments(store.getState())).toMatchSnapshot();
    });
  });
});

View file

@ -0,0 +1,8 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export { dataReducer } from './reducer';
export { DataAction } from './action';

View file

@ -0,0 +1,31 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { Reducer } from 'redux';
import { DataState, ResolverAction } from '../../types';
import { sampleData } from './sample';
/**
 * The initial data state. Seeds the results with the bundled sample data until
 * real data fetching is implemented.
 */
function initialState(): DataState {
  return {
    results: sampleData.data.result.search_results,
  };
}
/**
 * Reducer for Resolver data. Replaces the current result set whenever the server
 * returns new Resolver data; all other actions leave the state untouched.
 */
export const dataReducer: Reducer<DataState, ResolverAction> = (state = initialState(), action) => {
  if (action.type !== 'serverReturnedResolverData') {
    return state;
  }
  // eslint-disable-next-line @typescript-eslint/camelcase
  const searchResults = action.payload.data.result.search_results;
  return {
    ...state,
    results: searchResults,
  };
};

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,436 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { createSelector } from 'reselect';
import {
DataState,
ProcessEvent,
IndexedProcessTree,
ProcessWidths,
ProcessPositions,
EdgeLineSegment,
ProcessWithWidthMetadata,
Matrix3,
} from '../../types';
import { Vector2 } from '../../types';
import { add as vector2Add, applyMatrix3 } from '../../lib/vector2';
import { isGraphableProcess } from '../../models/process_event';
import {
factory as indexedProcessTreeFactory,
children as indexedProcessTreeChildren,
parent as indexedProcessTreeParent,
size,
levelOrder,
} from '../../models/indexed_process_tree';
// Number of pixels (at scale 1) represented by one layout unit.
const unit = 100;
// How many layout units apart adjacent sibling nodes must be.
const distanceBetweenNodesInUnits = 1;
/**
* An isometric projection is a method for representing three dimensional objects in 2 dimensions.
* More information about isometric projections can be found here https://en.wikipedia.org/wiki/Isometric_projection.
* In our case, we obtain the isometric projection by rotating the objects 45 degrees in the plane of the screen
* and arctan(1/sqrt(2)) (~35.3 degrees) through the horizontal axis.
*
* A rotation by 45 degrees in the plane of the screen is given by:
* [ sqrt(2)/2 -sqrt(2)/2 0
* sqrt(2)/2 sqrt(2)/2 0
* 0 0 1]
*
* A rotation by arctan(1/sqrt(2)) through the horizantal axis is given by:
* [ 1 0 0
* 0 sqrt(3)/3 -sqrt(6)/3
* 0 sqrt(6)/3 sqrt(3)/3]
*
* We can multiply both of these matrices to get the final transformation below.
*/
/* prettier-ignore */
const isometricTransformMatrix: Matrix3 = [
Math.sqrt(2) / 2, -(Math.sqrt(2) / 2), 0,
Math.sqrt(6) / 6, Math.sqrt(6) / 6, -(Math.sqrt(6) / 3),
0, 0, 1,
]
/**
* The distance in pixels (at scale 1) between nodes. Change this to space out nodes more
*/
export const distanceBetweenNodes = distanceBetweenNodesInUnits * unit;
/**
 * The processes in the current result set that can be drawn on the map
 * (those accepted by `isGraphableProcess`.)
 */
export function graphableProcesses(state: DataState) {
  const { results } = state;
  return results.filter(isGraphableProcess);
}
/**
* In laying out the graph, we precalculate the 'width' of each subtree. The 'width' of the subtree is determined by its
* descedants and the rule that each process node must be at least 1 unit apart. Enforcing that all nodes are at least
* 1 unit apart on the x axis makes it easy to prevent the UI components from overlapping. There will always be space.
*
* Example widths:
*
* A and B each have a width of 0
*
* A
* |
* B
*
* A has a width of 1. B and C have a width of 0.
* B and C must be 1 unit apart, so the A subtree has a width of 1.
*
* A
* ____|____
* | |
* B C
*
*
* D, E, F, G, H all have a width of 0.
* B has a width of 1 since D->E must be 1 unit apart.
* Similarly, C has a width of 1 since F->G must be 1 unit apart.
* A has width of 3, since B has a width of 1, and C has a width of 1, and E->F must be at least
* 1 unit apart.
* A
* ____|____
* | |
* B C
* ___|___ ___|___
* | | | |
* D E F G
* |
* H
*
*/
/**
 * Precalculates the layout width of every subtree in the process tree.
 * See the example widths in the comment above for how widths compose.
 */
function widthsOfProcessSubtrees(indexedProcessTree: IndexedProcessTree): ProcessWidths {
  const widths = new Map<ProcessEvent, number>();
  if (size(indexedProcessTree) === 0) {
    return widths;
  }
  // Walk the tree bottom-up (reverse level order) so every child's width is
  // already known when its parent's width is computed.
  for (const process of [...levelOrder(indexedProcessTree)].reverse()) {
    const children = indexedProcessTreeChildren(indexedProcessTree, process);
    let childWidthSum = 0;
    for (const child of children) {
      // Non-null assertion is safe: all children were visited earlier in this bottom-up walk.
      childWidthSum += widths.get(child)!;
    }
    // A subtree's width is its children's widths plus the mandatory gap between each adjacent pair.
    const gaps = Math.max(0, children.length - 1) * distanceBetweenNodes;
    widths.set(process, childWidthSum + gaps);
  }
  return widths;
}
/**
 * Computes the line segments that visually connect each process node to its parent.
 * A parent with a single child gets one direct segment; a parent with several children gets a
 * 'pitchfork': a line down from the parent to a horizontal 'midway' line, plus one short line
 * from the midway line up to each child.
 *
 * @param indexedProcessTree the tree being laid out
 * @param widths subtree widths precalculated by `widthsOfProcessSubtrees`
 * @param positions node positions, keyed by process
 */
function processEdgeLineSegments(
  indexedProcessTree: IndexedProcessTree,
  widths: ProcessWidths,
  positions: ProcessPositions
): EdgeLineSegment[] {
  const edgeLineSegments: EdgeLineSegment[] = [];
  for (const metadata of levelOrderWithWidths(indexedProcessTree, widths)) {
    /**
     * We only handle children, drawing lines back to their parents. The root has no parent, so we skip it
     */
    if (metadata.parent === null) {
      continue;
    }
    const { process, parent, parentWidth } = metadata;
    const position = positions.get(process);
    const parentPosition = positions.get(parent);
    if (position === undefined || parentPosition === undefined) {
      /**
       * All positions have been precalculated, so if any are missing, it's an error. This will never happen.
       */
      throw new Error();
    }
    /**
     * The point halfway between the parent and child on the y axis, we sometimes have a hard angle here in the edge line
     */
    const midwayY = parentPosition[1] + (position[1] - parentPosition[1]) / 2;
    /**
     * When drawing edge lines between a parent and children (when there are multiple children) we draw a pitchfork type
     * design. The 'midway' line, runs along the x axis and joins all the children with a single descendant line from the parent.
     * See the ascii diagram below. The underscore characters would be the midway line.
     *
     *          A
     *      ____|____
     *     |         |
     *     B         C
     */
    const lineFromProcessToMidwayLine: EdgeLineSegment = [[position[0], midwayY], position];
    const siblings = indexedProcessTreeChildren(indexedProcessTree, parent);
    const isFirstChild = process === siblings[0];
    if (metadata.isOnlyChild) {
      // add a single line segment directly from parent to child. We don't do the 'pitchfork' in this case.
      edgeLineSegments.push([parentPosition, position]);
    } else if (isFirstChild) {
      /**
       * If the parent has multiple children, we draw the 'midway' line, and the line from the
       * parent to the midway line, while handling the first child.
       *
       * Consider A the parent, and B the first child. We would draw somemthing like what's in the below diagram. The line from the
       * midway line to C would be drawn when we handle C.
       *
       *          A
       *      ____|____
       *     |
       *     B         C
       */
      const { firstChildWidth, lastChildWidth } = metadata;
      const lineFromParentToMidwayLine: EdgeLineSegment = [
        parentPosition,
        [parentPosition[0], midwayY],
      ];
      // The midway line spans from the center of the first child to the center of the last child.
      const widthOfMidline = parentWidth - firstChildWidth / 2 - lastChildWidth / 2;
      const minX = parentWidth / -2 + firstChildWidth / 2;
      const maxX = minX + widthOfMidline;
      const midwayLine: EdgeLineSegment = [
        [
          // Position line relative to the parent's x component
          parentPosition[0] + minX,
          midwayY,
        ],
        [
          // Position line relative to the parent's x component
          parentPosition[0] + maxX,
          midwayY,
        ],
      ];
      edgeLineSegments.push(
        /* line from parent to midway line */
        lineFromParentToMidwayLine,
        midwayLine,
        lineFromProcessToMidwayLine
      );
    } else {
      // If this isn't the first child, it must have siblings (the first of which drew the midway line and line
      // from the parent to the midway line
      edgeLineSegments.push(lineFromProcessToMidwayLine);
    }
  }
  return edgeLineSegments;
}
/**
 * Yield each process in the tree in level order along with precalculated width
 * metadata: the process's own subtree width, its parent, the parent's width, and
 * the widths of the parent's first and last children (used by
 * `processEdgeLineSegments` to size the 'midway' line.)
 *
 * The root process is yielded with all-null parent metadata.
 */
function* levelOrderWithWidths(
  tree: IndexedProcessTree,
  widths: ProcessWidths
): Iterable<ProcessWithWidthMetadata> {
  for (const process of levelOrder(tree)) {
    const parent = indexedProcessTreeParent(tree, process);
    const width = widths.get(process);
    if (width === undefined) {
      /**
       * All widths have been precalculated, so this will not happen.
       */
      throw new Error();
    }
    /** If the parent is undefined, we are processing the root. */
    if (parent === undefined) {
      yield {
        process,
        width,
        parent: null,
        parentWidth: null,
        isOnlyChild: null,
        firstChildWidth: null,
        lastChildWidth: null,
      };
    } else {
      const parentWidth = widths.get(parent);
      if (parentWidth === undefined) {
        /**
         * All widths have been precalculated, so this will not happen.
         */
        throw new Error();
      }
      const metadata: Partial<ProcessWithWidthMetadata> = {
        process,
        width,
        parent,
        parentWidth,
      };
      const siblings = indexedProcessTreeChildren(tree, parent);
      if (siblings.length === 1) {
        metadata.isOnlyChild = true;
        metadata.lastChildWidth = width;
        metadata.firstChildWidth = width;
      } else {
        const firstChildWidth = widths.get(siblings[0]);
        /**
         * Read the width of the *last* sibling here. Previously this read
         * `siblings[0]` (same as `firstChildWidth`), which made the midway line
         * the wrong length whenever the first and last children had different
         * subtree widths.
         */
        const lastChildWidth = widths.get(siblings[siblings.length - 1]);
        if (firstChildWidth === undefined || lastChildWidth === undefined) {
          /**
           * All widths have been precalculated, so this will not happen.
           */
          throw new Error();
        }
        metadata.isOnlyChild = false;
        metadata.firstChildWidth = firstChildWidth;
        metadata.lastChildWidth = lastChildWidth;
      }
      yield metadata as ProcessWithWidthMetadata;
    }
  }
}
/**
 * Calculate the position (in world coordinates) of each process node.
 *
 * Iterates the tree in level order. The root is placed at (0, 0); every other node
 * is positioned relative to its parent and its preceding siblings using the subtree
 * widths precalculated by `widthsOfProcessSubtrees`.
 */
function processPositions(
  indexedProcessTree: IndexedProcessTree,
  widths: ProcessWidths
): ProcessPositions {
  const positions = new Map<ProcessEvent, Vector2>();
  /**
   * This algorithm iterates the tree in level order. It keeps counters that are reset for each parent.
   * By keeping track of the last parent node, we can know when we are dealing with a new set of siblings and
   * reset the counters.
   */
  let lastProcessedParentNode: ProcessEvent | undefined;
  /**
   * Nodes are positioned relative to their siblings. We walk this in level order, so we handle
   * children left -> right.
   *
   * The width of preceding siblings is used to left align the node.
   * The number of preceding siblings is important because each sibling must be 1 unit apart
   * on the x axis.
   */
  let numberOfPrecedingSiblings = 0;
  let runningWidthOfPrecedingSiblings = 0;
  for (const metadata of levelOrderWithWidths(indexedProcessTree, widths)) {
    // Handle root node
    if (metadata.parent === null) {
      const { process } = metadata;
      /**
       * Place the root node at (0, 0) for now.
       */
      positions.set(process, [0, 0]);
    } else {
      const { process, parent, width, parentWidth } = metadata;
      // Reinit counters when parent changes
      if (lastProcessedParentNode !== parent) {
        numberOfPrecedingSiblings = 0;
        runningWidthOfPrecedingSiblings = 0;
        // keep track of this so we know when to reinitialize
        lastProcessedParentNode = parent;
      }
      const parentPosition = positions.get(parent);
      if (parentPosition === undefined) {
        /**
         * Since this algorithm populates the `positions` map in level order,
         * the parent node will have been processed already and the parent position
         * will always be available.
         *
         * This will never happen.
         */
        throw new Error();
      }
      /**
       * The x 'offset' is added to the x value of the parent to determine the position of the node.
       * We add `parentWidth / -2` in order to align the left side of this node with the left side of its parent.
       * We add `numberOfPrecedingSiblings * distanceBetweenNodes` in order to keep each node 1 apart on the x axis.
       * We add `runningWidthOfPrecedingSiblings` so that we don't overlap with our preceding siblings. We stack em up.
       * We add `width / 2` so that we center the node horizontally (in case it has non-0 width.)
       */
      const xOffset =
        parentWidth / -2 +
        numberOfPrecedingSiblings * distanceBetweenNodes +
        runningWidthOfPrecedingSiblings +
        width / 2;
      /**
       * The y axis gains `-distanceBetweenNodes` as we move down the screen 1 unit at a time.
       */
      const position = vector2Add([xOffset, -distanceBetweenNodes], parentPosition);
      positions.set(process, position);
      // Account for this node when positioning the next sibling.
      numberOfPrecedingSiblings += 1;
      runningWidthOfPrecedingSiblings += width;
    }
  }
  return positions;
}
export const processNodePositionsAndEdgeLineSegments = createSelector(
graphableProcesses,
function processNodePositionsAndEdgeLineSegments(
/* eslint-disable no-shadow */
graphableProcesses
/* eslint-enable no-shadow */
) {
/**
* Index the tree, creating maps from id -> node and id -> children
*/
const indexedProcessTree = indexedProcessTreeFactory(graphableProcesses);
/**
* Walk the tree in reverse level order, calculating the 'width' of subtrees.
*/
const widths = widthsOfProcessSubtrees(indexedProcessTree);
/**
* Walk the tree in level order. Using the precalculated widths, calculate the position of nodes.
* Nodes are positioned relative to their parents and preceding siblings.
*/
const positions = processPositions(indexedProcessTree, widths);
/**
* With the widths and positions precalculated, we calculate edge line segments (arrays of vector2s)
* which connect them in a 'pitchfork' design.
*/
const edgeLineSegments = processEdgeLineSegments(indexedProcessTree, widths, positions);
/**
* Transform the positions of nodes and edges so they seem like they are on an isometric grid.
*/
const transformedEdgeLineSegments: EdgeLineSegment[] = [];
const transformedPositions = new Map<ProcessEvent, Vector2>();
for (const [processEvent, position] of positions) {
transformedPositions.set(processEvent, applyMatrix3(position, isometricTransformMatrix));
}
for (const edgeLineSegment of edgeLineSegments) {
const transformedSegment = [];
for (const point of edgeLineSegment) {
transformedSegment.push(applyMatrix3(point, isometricTransformMatrix));
}
transformedEdgeLineSegments.push(transformedSegment);
}
return {
processNodePositions: transformedPositions,
edgeLineSegments: transformedEdgeLineSegments,
};
}
);

View file

@ -0,0 +1,47 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { createStore, StoreEnhancer } from 'redux';
import { ResolverAction } from '../types';
import { resolverReducer } from './reducer';
/**
 * Redux Devtools extension exposes itself via a property on the global object.
 * This interface can be used to cast `window` to a type that may expose Redux Devtools.
 */
interface SomethingThatMightHaveReduxDevTools {
  __REDUX_DEVTOOLS_EXTENSION__?: (options?: PartialReduxDevToolsOptions) => StoreEnhancer;
}

/**
 * Some of the options that can be passed when configuring Redux Devtools.
 */
interface PartialReduxDevToolsOptions {
  /**
   * A name for this store
   */
  name?: string;
  /**
   * A list of action types to ignore. This is used to ignore high frequency events created by a mousemove handler
   */
  actionsBlacklist?: readonly string[];
}

/**
 * Create the Resolver redux store, wiring in the Redux Devtools enhancer when the
 * browser extension is present on `window`.
 */
export const storeFactory = () => {
  const globalWithMaybeDevTools = window as SomethingThatMightHaveReduxDevTools;
  // Typing the blacklist against `ResolverAction['type']` ensures each entry is a real action type.
  const actionsBlacklist: ReadonlyArray<ResolverAction['type']> = ['userMovedPointer'];
  // `undefined` when the Devtools extension isn't installed; `createStore` accepts that.
  const devToolsEnhancer =
    globalWithMaybeDevTools.__REDUX_DEVTOOLS_EXTENSION__ &&
    globalWithMaybeDevTools.__REDUX_DEVTOOLS_EXTENSION__({
      name: 'Resolver',
      actionsBlacklist,
    });
  const store = createStore(resolverReducer, devToolsEnhancer);
  return {
    store,
  };
};

View file

@ -0,0 +1,14 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { Reducer, combineReducers } from 'redux';
import { cameraReducer } from './camera/reducer';
import { dataReducer } from './data/reducer';
import { ResolverState, ResolverAction } from '../types';
/**
 * The top-level reducer for Resolver state. Delegates the `camera` and `data`
 * slices of `ResolverState` to their own reducers via `combineReducers`.
 */
export const resolverReducer: Reducer<ResolverState, ResolverAction> = combineReducers({
  camera: cameraReducer,
  data: dataReducer,
});

View file

@ -0,0 +1,67 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import * as cameraSelectors from './camera/selectors';
import * as dataSelectors from './data/selectors';
import { ResolverState } from '../types';
/**
 * A matrix that when applied to a Vector2 will convert it from world coordinates to screen coordinates.
 * See https://en.wikipedia.org/wiki/Orthographic_projection
 */
export const projectionMatrix = composeSelectors(
  cameraStateSelector,
  cameraSelectors.projectionMatrix
);
/**
 * A matrix that when applied to a Vector2 converts it from screen coordinates to world coordinates.
 * See https://en.wikipedia.org/wiki/Orthographic_projection
 */
export const inverseProjectionMatrix = composeSelectors(
  cameraStateSelector,
  cameraSelectors.inverseProjectionMatrix
);
/**
 * The scale by which world values are scaled when rendered.
 */
export const scale = composeSelectors(cameraStateSelector, cameraSelectors.scale);
/**
 * Whether or not the user is currently panning the map.
 */
export const userIsPanning = composeSelectors(cameraStateSelector, cameraSelectors.userIsPanning);
/**
 * The laid-out process graph: a map of process node positions and the edge line
 * segments that connect them.
 */
export const processNodePositionsAndEdgeLineSegments = composeSelectors(
  dataStateSelector,
  dataSelectors.processNodePositionsAndEdgeLineSegments
);
/**
 * Returns the camera state from within ResolverState.
 * Kept as a function declaration: it is referenced (hoisted) by the
 * `composeSelectors` calls above it at module evaluation time.
 */
function cameraStateSelector(state: ResolverState) {
  return state.camera;
}
/**
 * Returns the data state from within ResolverState.
 * Kept as a function declaration: it is referenced (hoisted) by the
 * `composeSelectors` calls above it at module evaluation time.
 */
function dataStateSelector(state: ResolverState) {
  return state.data;
}
/**
 * Calls the `secondSelector` with the result of the `selector`. Use this when re-exporting a
 * concern-specific selector. `selector` should return the concern-specific state.
 *
 * Declared as a function (not a const arrow) so it is hoisted for the module-level
 * selector definitions above.
 */
function composeSelectors<OuterState, InnerState, ReturnValue>(
  selector: (state: OuterState) => InnerState,
  secondSelector: (state: InnerState) => ReturnValue
): (state: OuterState) => ReturnValue {
  return function composed(state) {
    const innerState = selector(state);
    return secondSelector(innerState);
  };
}

View file

@ -0,0 +1,184 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
export { ResolverAction } from './actions';
/**
 * Redux state for the Resolver feature. Properties on this interface are populated via multiple reducers using redux's `combineReducers`.
 */
export interface ResolverState {
  /**
   * Contains the state of the camera. This includes panning interactions, transform, and projection.
   */
  readonly camera: CameraState;
  /**
   * Contains the state associated with event data (process events and possibly other event types).
   */
  readonly data: DataState;
}
/**
 * State tracked while the user is panning the map with a pointing device.
 */
interface PanningState {
  /**
   * Screen coordinate vector representing the starting point when panning.
   */
  readonly origin: Vector2;
  /**
   * Screen coordinate vector representing the current point when panning.
   */
  readonly currentOffset: Vector2;
}
/**
 * Redux state for the virtual 'camera' used by Resolver.
 */
export interface CameraState {
  /**
   * Contains the starting and current position of the pointer when the user is panning the map.
   * `undefined` when the user is not panning.
   */
  readonly panning?: PanningState;
  /**
   * Scales the coordinate system, used for zooming.
   */
  readonly scaling: Vector2;
  /**
   * The size (in pixels) of the Resolver component.
   */
  readonly rasterSize: Vector2;
  /**
   * The camera world transform not counting any change from panning. When panning finishes, this value is updated to account for it.
   * Use the `transform` selector to get the transform adjusted for panning.
   */
  readonly translationNotCountingCurrentPanning: Vector2;
  /**
   * The world coordinates that the pointing device was last over. This is used during mousewheel zoom.
   */
  readonly latestFocusedWorldCoordinates: Vector2 | null;
}
/**
 * State for `data` reducer which handles receiving Resolver data from the backend.
 */
export interface DataState {
  readonly results: readonly ProcessEvent[];
}
/** A 2D vector: `[x, y]`. */
export type Vector2 = readonly [number, number];
/** A 3D vector: `[x, y, z]`. */
export type Vector3 = readonly [number, number, number];
/**
 * A rectangle with sides that align with the `x` and `y` axes (an axis-aligned bounding box.)
 */
export interface AABB {
  /**
   * Vector whose `x` component is the _left_ side of the AABB and whose `y` component is the _bottom_ side of the AABB.
   */
  readonly minimum: Vector2;
  /**
   * Vector whose `x` component is the _right_ side of the AABB and whose `y` component is the _top_ side of the AABB.
   */
  readonly maximum: Vector2;
}
/**
 * A 2D transformation matrix in row-major order.
 */
export type Matrix3 = readonly [
  number,
  number,
  number,
  number,
  number,
  number,
  number,
  number,
  number
];
/** The subtypes of process lifecycle events appearing in the sample data. */
type eventSubtypeFull =
  | 'creation_event'
  | 'fork_event'
  | 'exec_event'
  | 'already_running'
  | 'termination_event';
/** The top-level event type for process events. */
type eventTypeFull = 'process_event';
/**
 * The 'events' which contain process data and are used to model Resolver.
 */
export interface ProcessEvent {
  readonly event_timestamp: number;
  readonly event_type: number;
  readonly machine_id: string;
  readonly data_buffer: {
    event_subtype_full: eventSubtypeFull;
    event_type_full: eventTypeFull;
    node_id: number;
    // presumably references the parent process's node_id — TODO confirm against the data source
    source_id?: number;
    process_name: string;
    process_path: string;
  };
}
/**
 * A representation of a process tree with indices for O(1) access to children and values by id.
 */
export interface IndexedProcessTree {
  /**
   * Map of ID to a process's children
   */
  idToChildren: Map<number | undefined, ProcessEvent[]>;
  /**
   * Map of ID to process
   */
  idToProcess: Map<number, ProcessEvent>;
}
/**
 * A map of ProcessEvents (representing process nodes) to the 'width' of their subtrees as calculated by `widthsOfProcessSubtrees`
 */
export type ProcessWidths = Map<ProcessEvent, number>;
/**
 * Map of ProcessEvents (representing process nodes) to their positions. Calculated by `processPositions`
 */
export type ProcessPositions = Map<ProcessEvent, Vector2>;
/**
 * An array of Vector2s forming a polyline. Used to connect process nodes in the graph.
 */
export type EdgeLineSegment = Vector2[];
/**
 * Used to provide precalculated info from `widthsOfProcessSubtrees`. These 'width' values are used in the layout of the graph.
 */
export type ProcessWithWidthMetadata = {
  process: ProcessEvent;
  width: number;
} & (
  | {
      parent: ProcessEvent;
      parentWidth: number;
      isOnlyChild: boolean;
      firstChildWidth: number;
      lastChildWidth: number;
    }
  | {
      parent: null;
      /* Without a parent, there is no parent width */
      parentWidth: null;
      /* Without a parent, we can't be an only child */
      isOnlyChild: null;
      /** If there is no parent, there are no siblings */
      lastChildWidth: null;
      firstChildWidth: null;
    }
);

View file

@ -0,0 +1,75 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React from 'react';
import styled from 'styled-components';
import { useSelector } from 'react-redux';
import { applyMatrix3, distance, angle } from '../lib/vector2';
import { Vector2 } from '../types';
import * as selectors from '../store/selectors';
/**
 * A placeholder line segment view that connects process nodes.
 */
export const EdgeLine = styled(
  React.memo(
    ({
      className,
      startPosition,
      endPosition,
    }: {
      /**
       * A className string provided by `styled`
       */
      className?: string;
      /**
       * The position of the first point in the line segment. In 'world' coordinates.
       */
      startPosition: Vector2;
      /**
       * The position of the second point in the line segment. In 'world' coordinates.
       */
      endPosition: Vector2;
    }) => {
      /**
       * Project the start and end positions, which are in 'world' coordinates,
       * into screen coordinates for use as `left` and `top` css values.
       */
      const projectionMatrix = useSelector(selectors.projectionMatrix);
      const projectedStart = applyMatrix3(startPosition, projectionMatrix);
      const projectedEnd = applyMatrix3(endPosition, projectionMatrix);
      /**
       * The line is rendered as a short, long, `div` element whose length equals
       * the distance between the projected start and end points.
       */
      const lineLength = distance(projectedStart, projectedEnd);
      /**
       * The angle (in radians) between the two projected points, used to rotate the `div`.
       */
      const rotation = angle(projectedStart, projectedEnd);
      const style = {
        left: `${projectedStart[0]}px`,
        top: `${projectedStart[1]}px`,
        width: `${lineLength}px`,
        /**
         * Transform from the left of the div, as the left side of the `div` is positioned
         * at the start point of the line segment.
         */
        transformOrigin: 'top left',
        /**
         * Translate the `div` in the y axis to accommodate for the height of the `div`.
         * Also rotate the `div` in the z axis so that its angle matches the angle
         * between the start and end points.
         */
        transform: `translateY(-50%) rotateZ(${rotation}rad)`,
      };
      return <div className={className} style={style} />;
    }
  )
)`
  position: absolute;
  height: 3px;
  background-color: #d4d4d4;
  color: #333333;
`;

View file

@ -0,0 +1,162 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React, { useCallback, useState, useEffect } from 'react';
import { Store } from 'redux';
import { Provider, useSelector, useDispatch } from 'react-redux';
import styled from 'styled-components';
import { ResolverState, ResolverAction } from '../types';
import * as selectors from '../store/selectors';
import { useAutoUpdatingClientRect } from './use_autoupdating_client_rect';
import { useNonPassiveWheelHandler } from './use_nonpassive_wheel_handler';
import { ProcessEventDot } from './process_event_dot';
import { EdgeLine } from './edge_line';
/**
 * The top-level Resolver component. Receives the redux `store` as a prop and makes
 * it available to the component tree via react-redux's `Provider`.
 */
export const AppRoot = React.memo(({ store }: { store: Store<ResolverState, ResolverAction> }) => {
  return (
    <Provider store={store}>
      <Resolver />
    </Provider>
  );
});
/**
 * The Resolver map. Renders a dot for each process node and edge lines connecting
 * children to parents, and translates pointer interactions into redux actions:
 * drag to pan, and ctrl + wheel (trackpad pinch) to zoom.
 */
const Resolver = styled(
  React.memo(({ className }: { className?: string }) => {
    const dispatch: (action: ResolverAction) => unknown = useDispatch();
    const { processNodePositions, edgeLineSegments } = useSelector(
      selectors.processNodePositionsAndEdgeLineSegments
    );
    // The root DOM node; needed to attach a non-passive wheel listener below.
    const [ref, setRef] = useState<null | HTMLDivElement>(null);
    const userIsPanning = useSelector(selectors.userIsPanning);
    const [elementBoundingClientRect, clientRectCallback] = useAutoUpdatingClientRect();
    // Convert viewport (client) coordinates to coordinates relative to this element.
    // Returns null until the element's bounding rect is known.
    const relativeCoordinatesFromMouseEvent = useCallback(
      (event: { clientX: number; clientY: number }): null | [number, number] => {
        if (elementBoundingClientRect === null) {
          return null;
        }
        return [
          event.clientX - elementBoundingClientRect.x,
          event.clientY - elementBoundingClientRect.y,
        ];
      },
      [elementBoundingClientRect]
    );
    // Keep the store informed of the element's size (the 'raster size'.)
    useEffect(() => {
      if (elementBoundingClientRect !== null) {
        dispatch({
          type: 'userSetRasterSize',
          payload: [elementBoundingClientRect.width, elementBoundingClientRect.height],
        });
      }
    }, [dispatch, elementBoundingClientRect]);
    // Mouse down on the map starts a panning interaction.
    const handleMouseDown = useCallback(
      (event: React.MouseEvent<HTMLDivElement>) => {
        const maybeCoordinates = relativeCoordinatesFromMouseEvent(event);
        if (maybeCoordinates !== null) {
          dispatch({
            type: 'userStartedPanning',
            payload: maybeCoordinates,
          });
        }
      },
      [dispatch, relativeCoordinatesFromMouseEvent]
    );
    // Pointer movement is reported to the store; attached to `window` below so that
    // panning continues even when the pointer leaves the element.
    const handleMouseMove = useCallback(
      (event: MouseEvent) => {
        const maybeCoordinates = relativeCoordinatesFromMouseEvent(event);
        if (maybeCoordinates) {
          dispatch({
            type: 'userMovedPointer',
            payload: maybeCoordinates,
          });
        }
      },
      [dispatch, relativeCoordinatesFromMouseEvent]
    );
    // Mouse up anywhere ends the panning interaction, if one is in progress.
    const handleMouseUp = useCallback(() => {
      if (userIsPanning) {
        dispatch({
          type: 'userStoppedPanning',
        });
      }
    }, [dispatch, userIsPanning]);
    const handleWheel = useCallback(
      (event: WheelEvent) => {
        // we use elementBoundingClientRect to interpret pixel deltas as a fraction of the element's height
        if (
          elementBoundingClientRect !== null &&
          event.ctrlKey &&
          event.deltaY !== 0 &&
          event.deltaMode === 0
        ) {
          event.preventDefault();
          dispatch({
            type: 'userZoomed',
            payload: (-2 * event.deltaY) / elementBoundingClientRect.height,
          });
        }
      },
      [elementBoundingClientRect, dispatch]
    );
    // Listen on `window` so mouse up is seen even if released outside the element.
    useEffect(() => {
      window.addEventListener('mouseup', handleMouseUp, { passive: true });
      return () => {
        window.removeEventListener('mouseup', handleMouseUp);
      };
    }, [handleMouseUp]);
    useEffect(() => {
      window.addEventListener('mousemove', handleMouseMove, { passive: true });
      return () => {
        window.removeEventListener('mousemove', handleMouseMove);
      };
    }, [handleMouseMove]);
    // A single ref callback serving both the local `ref` state and the auto-updating
    // client rect hook.
    const refCallback = useCallback(
      (node: null | HTMLDivElement) => {
        setRef(node);
        clientRectCallback(node);
      },
      [clientRectCallback]
    );
    // The wheel handler must be non-passive so it may call `preventDefault`.
    useNonPassiveWheelHandler(handleWheel, ref);
    return (
      <div
        data-test-subj="resolverEmbeddable"
        className={className}
        ref={refCallback}
        onMouseDown={handleMouseDown}
      >
        {Array.from(processNodePositions).map(([processEvent, position], index) => (
          <ProcessEventDot key={index} position={position} event={processEvent} />
        ))}
        {edgeLineSegments.map(([startPosition, endPosition], index) => (
          <EdgeLine key={index} startPosition={startPosition} endPosition={endPosition} />
        ))}
      </div>
    );
  })
)`
  /**
   * Take up all availble space
   */
  display: flex;
  flex-grow: 1;
  position: relative;
`;

View file

@ -0,0 +1,72 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React from 'react';
import styled from 'styled-components';
import { useSelector } from 'react-redux';
import { applyMatrix3 } from '../lib/vector2';
import { Vector2, ProcessEvent } from '../types';
import * as selectors from '../store/selectors';
/**
 * A placeholder view for a process node.
 */
export const ProcessEventDot = styled(
  React.memo(
    ({
      className,
      position,
      event,
    }: {
      /**
       * A `className` string provided by `styled`
       */
      className?: string;
      /**
       * The position of the process node, in 'world' coordinates.
       */
      position: Vector2;
      /**
       * An event which contains details about the process node.
       */
      event: ProcessEvent;
    }) => {
      /**
       * Project the position, which is in 'world' coordinates, into screen coordinates.
       */
      const projectionMatrix = useSelector(selectors.projectionMatrix);
      const [screenX, screenY] = applyMatrix3(position, projectionMatrix);
      // Offset by half of the node's 40px width/height so the dot is centered on the position.
      const style = {
        left: `${screenX - 20}px`,
        top: `${screenY - 20}px`,
      };
      return (
        <span className={className} style={style}>
          name: {event.data_buffer.process_name}
          <br />
          x: {position[0]}
          <br />
          y: {position[1]}
        </span>
      );
    }
  )
)`
  position: absolute;
  width: 40px;
  height: 40px;
  text-align: left;
  font-size: 10px;
  /**
   * Give the element a button-like appearance.
   */
  user-select: none;
  border: 1px solid black;
  box-sizing: border-box;
  border-radius: 10%;
  padding: 4px;
  white-space: nowrap;
`;

View file

@ -0,0 +1,43 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { useCallback, useState, useEffect, useRef } from 'react';
import ResizeObserver from 'resize-observer-polyfill';
/**
 * Returns a nullable DOMRect and a ref callback. Pass the refCallback to the
 * `ref` property of a native element and this hook will return a DOMRect for
 * it by calling `getBoundingClientRect`. This hook will observe the element
 * with a resize observer and call getBoundingClientRect again after resizes.
 *
 * Note that the changes to the position of the element aren't automatically
 * tracked. So if the element's position moves for some reason, be sure to
 * handle that.
 */
export function useAutoUpdatingClientRect(): [DOMRect | null, (node: Element | null) => void] {
  const [rect, setRect] = useState<DOMRect | null>(null);
  // Holds the current element; kept in a ref so the effect below can read it.
  const nodeRef = useRef<Element | null>(null);
  // Ref callback: record the node and take an initial measurement.
  const ref = useCallback((node: Element | null) => {
    nodeRef.current = node;
    if (node !== null) {
      setRect(node.getBoundingClientRect());
    }
  }, []);
  useEffect(() => {
    // React runs ref callbacks before effects, so `nodeRef.current` is set by now.
    if (nodeRef.current !== null) {
      const resizeObserver = new ResizeObserver(entries => {
        // Guard against stale observations: only re-measure if the entry is still our node.
        if (nodeRef.current !== null && nodeRef.current === entries[0].target) {
          setRect(nodeRef.current.getBoundingClientRect());
        }
      });
      resizeObserver.observe(nodeRef.current);
      // Stop observing when the component unmounts.
      return () => {
        resizeObserver.disconnect();
      };
    }
  }, [nodeRef]);
  return [rect, ref];
}

View file

@ -0,0 +1,26 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import { useEffect } from 'react';
/**
 * Register an event handler directly on `elementRef` for the `wheel` event, with no options.
 * React sets native event listeners on the `window` and calls provided handlers via event propagation.
 * As of Chrome 73, `'wheel'` events on `window` are automatically treated as 'passive'.
 * If you don't need to call `event.preventDefault` then you should use regular React event handling instead.
 */
export function useNonPassiveWheelHandler(
  handler: (event: WheelEvent) => void,
  elementRef: HTMLElement | null
) {
  useEffect(() => {
    // Nothing to attach to until the element exists.
    if (elementRef === null) {
      return;
    }
    elementRef.addEventListener('wheel', handler);
    // Remove the listener when the element or handler changes, or on unmount.
    return () => {
      elementRef.removeEventListener('wheel', handler);
    };
  }, [elementRef, handler]);
}

View file

@ -26,7 +26,6 @@ export class EndpointPlugin
EndpointPluginStartDependencies
> {
public setup(core: CoreSetup, plugins: EndpointPluginSetupDependencies) {
const resolverEmbeddableFactory = new ResolverEmbeddableFactory();
core.application.register({
id: 'endpoint',
title: i18n.translate('xpack.endpoint.pluginTitle', {
@ -39,6 +38,8 @@ export class EndpointPlugin
},
});
const resolverEmbeddableFactory = new ResolverEmbeddableFactory();
plugins.embeddable.registerEmbeddableFactory(
resolverEmbeddableFactory.type,
resolverEmbeddableFactory

View file

@ -10,6 +10,7 @@ import { AppMountParameters } from 'kibana/public';
import { I18nProvider } from '@kbn/i18n/react';
import { IEmbeddable } from 'src/plugins/embeddable/public';
import { useEffect } from 'react';
import styled from 'styled-components';
/**
* This module will be loaded asynchronously to reduce the bundle size of your plugin's main bundle.
@ -18,6 +19,12 @@ export function renderApp(
{ element }: AppMountParameters,
embeddable: Promise<IEmbeddable | undefined>
) {
/**
* The application DOM node should take all available space.
*/
element.style.display = 'flex';
element.style.flexGrow = '1';
ReactDOM.render(
<I18nProvider>
<AppRoot embeddable={embeddable} />
@ -30,34 +37,89 @@ export function renderApp(
};
}
const AppRoot = React.memo(
({ embeddable: embeddablePromise }: { embeddable: Promise<IEmbeddable | undefined> }) => {
const [embeddable, setEmbeddable] = React.useState<IEmbeddable | undefined>(undefined);
const [renderTarget, setRenderTarget] = React.useState<HTMLDivElement | null>(null);
const AppRoot = styled(
React.memo(
({
embeddable: embeddablePromise,
className,
}: {
/**
* A promise which resolves to the Resolver embeddable.
*/
embeddable: Promise<IEmbeddable | undefined>;
/**
* A `className` string provided by `styled`
*/
className?: string;
}) => {
/**
* This state holds the reference to the embeddable, once resolved.
*/
const [embeddable, setEmbeddable] = React.useState<IEmbeddable | undefined>(undefined);
/**
* This state holds the reference to the DOM node that will contain the embeddable.
*/
const [renderTarget, setRenderTarget] = React.useState<HTMLDivElement | null>(null);
useEffect(() => {
let cleanUp;
Promise.race([
new Promise<never>((_resolve, reject) => {
/**
* Keep component state with the Resolver embeddable.
*
* If the reference to the embeddablePromise changes, we ignore the stale promise.
*/
useEffect(() => {
/**
* A promise rejection function that will prevent a stale embeddable promise from being resolved
* as the current embeddable.
*
* If the embeddablePromise itself changes before the old one is resolved, we cancel and restart this effect.
*/
let cleanUp;
const cleanupPromise = new Promise<never>((_resolve, reject) => {
cleanUp = reject;
}),
embeddablePromise,
]).then(value => {
setEmbeddable(value);
});
});
return cleanUp;
}, [embeddablePromise]);
/**
* Either set the embeddable in state, or cancel and restart this process.
*/
Promise.race([cleanupPromise, embeddablePromise]).then(value => {
setEmbeddable(value);
});
useEffect(() => {
if (embeddable && renderTarget) {
embeddable.render(renderTarget);
return () => {
embeddable.destroy();
};
}
}, [embeddable, renderTarget]);
/**
* If `embeddablePromise` is changed, the cleanup function is run.
*/
return cleanUp;
}, [embeddablePromise]);
return <div data-test-subj="resolverEmbeddableContainer" ref={setRenderTarget} />;
}
);
/**
* Render the embeddable into the DOM node.
*/
useEffect(() => {
if (embeddable && renderTarget) {
embeddable.render(renderTarget);
/**
* If the embeddable or DOM node changes then destroy the old embeddable.
*/
return () => {
embeddable.destroy();
};
}
}, [embeddable, renderTarget]);
return (
<div
className={className}
data-test-subj="resolverEmbeddableContainer"
ref={setRenderTarget}
/>
);
}
)
)`
/**
* Take all available space.
*/
display: flex;
flex-grow: 1;
`;

View file

@ -26,6 +26,12 @@ export class ResolverTestPlugin
private resolveEmbeddable!: (
value: IEmbeddable | undefined | PromiseLike<IEmbeddable | undefined> | undefined
) => void;
/**
* We register our application during the `setup` phase, but the embeddable
* plugin API is not available until the `start` phase. In order to access
* the embeddable API from our application, we pass a Promise to the application
* which we resolve during the `start` phase.
*/
private embeddablePromise: Promise<IEmbeddable | undefined> = new Promise<
IEmbeddable | undefined
>(resolve => {
@ -39,6 +45,9 @@ export class ResolverTestPlugin
}),
mount: async (_context, params) => {
const { renderApp } = await import('./applications/resolver_test');
/**
* Pass a promise which resolves to the Resolver embeddable.
*/
return renderApp(params, this.embeddablePromise);
},
});
@ -47,6 +56,9 @@ export class ResolverTestPlugin
public start(...args: [unknown, { embeddable: IEmbeddableStart }]) {
const [, plugins] = args;
const factory = plugins.embeddable.getEmbeddableFactory('resolver');
/**
* Provide the Resolver embeddable to the application
*/
this.resolveEmbeddable(factory.create({ id: 'test basic render' }));
}
public stop() {}

View file

@ -4076,6 +4076,16 @@
"@types/react" "*"
redux "^4.0.0"
"@types/react-redux@^7.1.0":
version "7.1.5"
resolved "https://registry.yarnpkg.com/@types/react-redux/-/react-redux-7.1.5.tgz#c7a528d538969250347aa53c52241051cf886bd3"
integrity sha512-ZoNGQMDxh5ENY7PzU7MVonxDzS1l/EWiy8nUhDqxFqUZn4ovboCyvk4Djf68x6COb7vhGTKjyjxHxtFdAA5sUA==
dependencies:
"@types/hoist-non-react-statics" "^3.3.0"
"@types/react" "*"
hoist-non-react-statics "^3.3.0"
redux "^4.0.0"
"@types/react-resize-detector@^4.0.1":
version "4.0.1"
resolved "https://registry.yarnpkg.com/@types/react-resize-detector/-/react-resize-detector-4.0.1.tgz#cc8f012f5957e4826e69b8d2afd59baadcac556c"