Support for date_nanos type timestamps in context view (#38023)

* Reenable context view for time_nanos based index patterns
Matthias Wilhelm 2019-06-11 15:04:54 +02:00 committed by GitHub
parent c7349fe476
commit a33201cf39
13 changed files with 433 additions and 169 deletions

View file

@ -90,7 +90,7 @@ directive including its respective styles.
**api/anchor.js**: Exports `fetchAnchor()` that creates and executes the
query for the anchor document.
**api/context.js**: Exports `fetchPredecessors()` and `fetchSuccessors()` that
**api/context.js**: Exports `fetchPredecessors()`, `fetchSuccessors()`, and `fetchSurroundingDocs()` that
create and execute the queries for the preceding and succeeding documents.
**api/utils**: Exports various functions used to create and transform
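For orientation, here is a minimal usage sketch of the new `fetchSurroundingDocs()` export, based on the signature introduced in this commit. The argument values are illustrative, and `Private`/`fetchContextProvider` are assumed to be wired up the way the context app already does it:

```js
// Sketch only – not part of this patch; argument values are illustrative.
import { fetchContextProvider } from '../context'; // same relative path the unit tests use

const { fetchSurroundingDocs } = Private(fetchContextProvider);

async function loadPredecessors() {
  // fetch up to 5 documents that precede the anchor document in time
  return fetchSurroundingDocs(
    'predecessors',                   // SurroundingDocType
    'INDEX_PATTERN_ID',               // index pattern id
    '@timestamp',                     // time field that is sorted on
    'desc',                           // sort direction of the time field
    '2019-09-18T06:50:13.000000101Z', // anchor time in ISO format (keeps the nanoseconds)
    1568789413000,                    // anchor time in milliseconds (used for non-nanos patterns)
    '_doc',                           // tie breaker field
    0,                                // tie breaker value of the anchor
    5,                                // number of documents to fetch
    []                                // additional filters
  );
}
```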

View file

@ -25,6 +25,7 @@ export function createIndexPatternsStub() {
get: sinon.spy(indexPatternId =>
Promise.resolve({
id: indexPatternId,
isTimeNanosBased: () => false
})
),
};

View file

@ -91,7 +91,7 @@ describe('context app', function () {
return fetchAnchor('INDEX_PATTERN_ID', 'doc', 'id', [{ '@timestamp': 'desc' }, { '_doc': 'desc' }])
.then(() => {
const setFieldSpy = searchSourceStub.setField;
expect(setFieldSpy.firstCall.args[1]).to.eql({ id: 'INDEX_PATTERN_ID' });
expect(setFieldSpy.firstCall.args[1].id).to.eql('INDEX_PATTERN_ID');
});
});

View file

@ -28,6 +28,10 @@ import { SearchSourceProvider } from 'ui/courier';
import { fetchContextProvider } from '../context';
const MS_PER_DAY = 24 * 60 * 60 * 1000;
const ANCHOR_TIMESTAMP = (new Date(MS_PER_DAY)).toJSON();
const ANCHOR_TIMESTAMP_3 = (new Date(MS_PER_DAY * 3)).toJSON();
const ANCHOR_TIMESTAMP_1000 = (new Date(MS_PER_DAY * 1000)).toJSON();
const ANCHOR_TIMESTAMP_3000 = (new Date(MS_PER_DAY * 3000)).toJSON();
describe('context app', function () {
beforeEach(ngMock.module('kibana'));
@ -61,6 +65,7 @@ describe('context app', function () {
'INDEX_PATTERN_ID',
'@timestamp',
'desc',
ANCHOR_TIMESTAMP_3000,
MS_PER_DAY * 3000,
'_doc',
0,
@ -87,6 +92,7 @@ describe('context app', function () {
'INDEX_PATTERN_ID',
'@timestamp',
'desc',
ANCHOR_TIMESTAMP_3000,
MS_PER_DAY * 3000,
'_doc',
0,
@ -122,6 +128,7 @@ describe('context app', function () {
'INDEX_PATTERN_ID',
'@timestamp',
'desc',
ANCHOR_TIMESTAMP_1000,
MS_PER_DAY * 1000,
'_doc',
0,
@ -138,7 +145,6 @@ describe('context app', function () {
// should have stopped before reaching MS_PER_DAY * 1700
expect(moment(_.last(intervals).lte).valueOf()).to.be.lessThan(MS_PER_DAY * 1700);
expect(intervals.length).to.be.greaterThan(1);
expect(hits).to.eql(searchSourceStub._stubHits.slice(-3));
});
});
@ -148,6 +154,7 @@ describe('context app', function () {
'INDEX_PATTERN_ID',
'@timestamp',
'desc',
ANCHOR_TIMESTAMP_3,
MS_PER_DAY * 3,
'_doc',
0,
@ -166,6 +173,7 @@ describe('context app', function () {
'INDEX_PATTERN_ID',
'@timestamp',
'desc',
ANCHOR_TIMESTAMP_3,
MS_PER_DAY * 3,
'_doc',
0,
@ -186,6 +194,7 @@ describe('context app', function () {
'INDEX_PATTERN_ID',
'@timestamp',
'desc',
ANCHOR_TIMESTAMP,
MS_PER_DAY,
'_doc',
0,

View file

@ -28,6 +28,9 @@ import { SearchSourceProvider } from 'ui/courier';
import { fetchContextProvider } from '../context';
const MS_PER_DAY = 24 * 60 * 60 * 1000;
const ANCHOR_TIMESTAMP = (new Date(MS_PER_DAY)).toJSON();
const ANCHOR_TIMESTAMP_3 = (new Date(MS_PER_DAY * 3)).toJSON();
const ANCHOR_TIMESTAMP_3000 = (new Date(MS_PER_DAY * 3000)).toJSON();
describe('context app', function () {
beforeEach(ngMock.module('kibana'));
@ -61,6 +64,7 @@ describe('context app', function () {
'INDEX_PATTERN_ID',
'@timestamp',
'desc',
ANCHOR_TIMESTAMP_3000,
MS_PER_DAY * 3000,
'_doc',
0,
@ -87,6 +91,7 @@ describe('context app', function () {
'INDEX_PATTERN_ID',
'@timestamp',
'desc',
ANCHOR_TIMESTAMP_3000,
MS_PER_DAY * 3000,
'_doc',
0,
@ -124,6 +129,7 @@ describe('context app', function () {
'INDEX_PATTERN_ID',
'@timestamp',
'desc',
ANCHOR_TIMESTAMP_3000,
MS_PER_DAY * 3000,
'_doc',
0,
@ -150,6 +156,7 @@ describe('context app', function () {
'INDEX_PATTERN_ID',
'@timestamp',
'desc',
ANCHOR_TIMESTAMP_3,
MS_PER_DAY * 3,
'_doc',
0,
@ -168,6 +175,7 @@ describe('context app', function () {
'INDEX_PATTERN_ID',
'@timestamp',
'desc',
ANCHOR_TIMESTAMP_3,
MS_PER_DAY * 3,
'_doc',
0,
@ -188,6 +196,7 @@ describe('context app', function () {
'INDEX_PATTERN_ID',
'@timestamp',
'desc',
ANCHOR_TIMESTAMP,
MS_PER_DAY,
'_doc',
0,

View file

@ -18,12 +18,15 @@
*/
// @ts-check
// @ts-ignore
import { SearchSourceProvider } from 'ui/courier';
import moment from 'moment';
import { reverseSortDirection } from './utils/sorting';
import {
extractNanoSeconds,
convertIsoToNanosAsStr,
convertIsoToMillis,
convertTimeValueToIso
} from './utils/date_conversion';
/**
* @typedef {Object} SearchResult
@ -42,6 +45,10 @@ import { reverseSortDirection } from './utils/sorting';
* @typedef {'asc' | 'desc'} SortDirection
*/
/**
* @typedef {'successors' |'predecessors'} SurroundingDocType
*/
const DAY_MILLIS = 24 * 60 * 60 * 1000;
// look from 1 day up to 10000 days into the past and future
@ -54,110 +61,93 @@ function fetchContextProvider(indexPatterns, Private) {
const SearchSource = Private(SearchSourceProvider);
return {
fetchPredecessors,
fetchSuccessors,
// @ts-ignore / for testing
fetchPredecessors: (...args) => fetchSurroundingDocs('predecessors', ...args),
// @ts-ignore / for testing
fetchSuccessors: (...args) => fetchSurroundingDocs('successors', ...args),
fetchSurroundingDocs,
};
async function fetchSuccessors(
/**
* Fetch successor or predecessor documents of a given anchor document
*
* @param {SurroundingDocType} type - `successors` or `predecessors`
* @param {string} indexPatternId
* @param {string} timeFieldName - name of the time field that is sorted on
* @param {SortDirection} timeFieldSortDir - direction of sorting
* @param {string} timeFieldIsoValue - value of the anchor's time field in ISO format
* @param {number} timeFieldNumValue - value of the anchor's time field in milliseconds (not precise enough for date_nanos)
* @param {string} tieBreakerField - name of the secondary field used for sorting
* @param {string} tieBreakerValue - value of the secondary sort field for the anchor
* @param {number} size - number of records to retrieve
* @param {any[]} filters - filters to apply to the Elasticsearch query
* @returns {Promise<object[]>}
*/
async function fetchSurroundingDocs(
type,
indexPatternId,
timeField,
timeSortDirection,
timeValue,
timeFieldName,
timeFieldSortDir,
timeFieldIsoValue,
timeFieldNumValue,
tieBreakerField,
tieBreakerValue,
size,
filters
) {
const searchSource = await createSearchSource(indexPatternId, filters);
const offsetSign = timeSortDirection === 'asc' ? 1 : -1;
const indexPattern = await indexPatterns.get(indexPatternId);
const searchSource = await createSearchSource(indexPattern, filters);
const sortDir = type === 'successors' ? timeFieldSortDir : reverseSortDirection(timeFieldSortDir);
const nanoSeconds = indexPattern.isTimeNanosBased() ? extractNanoSeconds(timeFieldIsoValue) : '';
const timeValueMillis = nanoSeconds !== '' ? convertIsoToMillis(timeFieldIsoValue) : timeFieldNumValue;
const offsetSign = (timeFieldSortDir === 'asc' && type === 'successors' || timeFieldSortDir === 'desc' && type === 'predecessors')
? 1
: -1;
// ending with `null` opens the last interval
const intervals = asPairs([...LOOKUP_OFFSETS.map(offset => timeValue + offset * offsetSign), null]);
const intervals = asPairs([...LOOKUP_OFFSETS.map(offset => timeValueMillis + offset * offsetSign), null]);
let successors = [];
for (const [startTimeValue, endTimeValue] of intervals) {
const remainingSize = size - successors.length;
let documents = [];
for (const [iStartTimeValue, iEndTimeValue] of intervals) {
const remainingSize = size - documents.length;
if (remainingSize <= 0) {
break;
}
const [afterTimeValue, afterTieBreakerValue] = successors.length > 0
? successors[successors.length - 1].sort
: [timeValue, tieBreakerValue];
const afterTimeRecIdx = type === 'successors' && documents.length ? documents.length - 1 : 0;
const afterTimeValue = nanoSeconds
? convertIsoToNanosAsStr(documents.length ? documents[afterTimeRecIdx]._source[timeFieldName] : timeFieldIsoValue)
: timeFieldNumValue;
const afterTieBreakerValue = documents.length > 0 ? documents[afterTimeRecIdx].sort[1] : tieBreakerValue;
const hits = await fetchHitsInInterval(
searchSource,
timeField,
timeSortDirection,
startTimeValue,
endTimeValue,
timeFieldName,
sortDir,
iStartTimeValue,
iEndTimeValue,
afterTimeValue,
tieBreakerField,
afterTieBreakerValue,
remainingSize
remainingSize,
nanoSeconds
);
successors = [...successors, ...hits];
documents = type === 'successors'
? [...documents, ...hits]
: [...hits.slice().reverse(), ...documents];
}
return successors;
}
async function fetchPredecessors(
indexPatternId,
timeField,
timeSortDirection,
timeValue,
tieBreakerField,
tieBreakerValue,
size,
filters
) {
const searchSource = await createSearchSource(indexPatternId, filters);
const offsetSign = timeSortDirection === 'desc' ? 1 : -1;
// ending with `null` opens the last interval
const intervals = asPairs([...LOOKUP_OFFSETS.map(offset => timeValue + offset * offsetSign), null]);
let predecessors = [];
for (const [startTimeValue, endTimeValue] of intervals) {
const remainingSize = size - predecessors.length;
if (remainingSize <= 0) {
break;
}
const [afterTimeValue, afterTieBreakerValue] = predecessors.length > 0
? predecessors[0].sort
: [timeValue, tieBreakerValue];
const hits = await fetchHitsInInterval(
searchSource,
timeField,
reverseSortDirection(timeSortDirection),
startTimeValue,
endTimeValue,
afterTimeValue,
tieBreakerField,
afterTieBreakerValue,
remainingSize
);
predecessors = [...hits.slice().reverse(), ...predecessors];
}
return predecessors;
return documents;
}
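As an aside on the control flow above (a simplified sketch, not the actual Kibana code): the loop walks progressively wider time intervals around the anchor and stops once enough documents are collected; successors are appended after the anchor, predecessors are prepended before it. `fetchHits` below stands in for `fetchHitsInInterval`, and the day offsets are illustrative:

```js
// Sketch only – simplified names and offsets; the real code also folds the
// sort direction into the offset sign and pages with search_after.
async function collectSurroundingDocs(type, anchorMillis, size, fetchHits) {
  const DAY = 24 * 60 * 60 * 1000;
  // widen the search window step by step; a trailing `null` leaves the last interval open
  const boundaries = [0, 1, 7, 30, 365, 10000]
    .map(days => anchorMillis + days * DAY * (type === 'successors' ? 1 : -1))
    .concat([null]);
  const intervals = boundaries.slice(0, -1).map((start, i) => [start, boundaries[i + 1]]);

  let documents = [];
  for (const [start, end] of intervals) {
    const remaining = size - documents.length;
    if (remaining <= 0) break;                            // collected enough documents
    const hits = await fetchHits(start, end, remaining);  // one interval at a time
    documents = type === 'successors'
      ? [...documents, ...hits]                           // append after the anchor
      : [...hits.slice().reverse(), ...documents];        // prepend before the anchor
  }
  return documents;
}
```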
/**
* @param {string} indexPatternId
* @param {Object} indexPattern
* @param {any[]} filters
* @returns {Promise<Object>}
*/
async function createSearchSource(indexPatternId, filters) {
const indexPattern = await indexPatterns.get(indexPatternId);
async function createSearchSource(indexPattern, filters) {
return new SearchSource()
.setParent(false)
.setField('index', indexPattern)
@ -166,7 +156,7 @@ function fetchContextProvider(indexPatterns, Private) {
/**
* Fetch the hits between `(afterTimeValue, tieBreakerValue)` and
* `endTimeValue` from the `searchSource` using the given `timeField` and
* `endRangeMillis` from the `searchSource` using the given `timeField` and
* `tieBreakerField` fields up to a maximum of `maxCount` documents. The
* documents are sorted by `(timeField, tieBreakerField)` using the
* `timeSortDirection` for both fields
@ -175,32 +165,35 @@ function fetchContextProvider(indexPatterns, Private) {
* and filters set.
*
* @param {SearchSourceT} searchSource
* @param {string} timeField
* @param {SortDirection} timeSortDirection
* @param {number} startTimeValue
* @param {number | null} endTimeValue
* @param {number} [afterTimeValue=startTimeValue]
* @param {string} timeFieldName
* @param {SortDirection} timeFieldSortDir
* @param {number} startRangeMillis
* @param {number | null} endRangeMillis
* @param {number| string} afterTimeValue
* @param {string} tieBreakerField
* @param {number} tieBreakerValue
* @param {number} maxCount
* @param {string} nanosValue
* @returns {Promise<object[]>}
*/
async function fetchHitsInInterval(
searchSource,
timeField,
timeSortDirection,
startTimeValue,
endTimeValue,
timeFieldName,
timeFieldSortDir,
startRangeMillis,
endRangeMillis,
afterTimeValue,
tieBreakerField,
tieBreakerValue,
maxCount
maxCount,
nanosValue
) {
const startRange = {
[timeSortDirection === 'asc' ? 'gte' : 'lte']: moment(startTimeValue).toISOString(),
[timeFieldSortDir === 'asc' ? 'gte' : 'lte']: convertTimeValueToIso(startRangeMillis, nanosValue),
};
const endRange = endTimeValue === null ? {} : {
[timeSortDirection === 'asc' ? 'lte' : 'gte']: moment(endTimeValue).toISOString(),
const endRange = endRangeMillis === null ? {} : {
[timeFieldSortDir === 'asc' ? 'lte' : 'gte']: convertTimeValueToIso(endRangeMillis, nanosValue),
};
const response = await searchSource
@ -210,7 +203,7 @@ function fetchContextProvider(indexPatterns, Private) {
constant_score: {
filter: {
range: {
[timeField]: {
[timeFieldName]: {
format: 'strict_date_optional_time',
...startRange,
...endRange,
@ -222,12 +215,12 @@ function fetchContextProvider(indexPatterns, Private) {
language: 'lucene'
})
.setField('searchAfter', [
afterTimeValue !== null ? afterTimeValue : startTimeValue,
tieBreakerValue,
afterTimeValue,
tieBreakerValue
])
.setField('sort', [
{ [timeField]: timeSortDirection },
{ [tieBreakerField]: timeSortDirection },
{ [timeFieldName]: timeFieldSortDir },
{ [tieBreakerField]: timeFieldSortDir },
])
.setField('version', true)
.fetch();
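To make the nanosecond handling concrete: for an ascending search on a date_nanos based index pattern, the values flowing into the `setField()` calls above look roughly like this (a sketch with illustrative timestamps taken from the functional test data further down, not a capture of a real request):

```js
// range boundaries keep the full 9-digit fraction via convertTimeValueToIso()
const startRange = { gte: '2019-09-18T06:50:13.000000101Z' };
const endRange   = { lte: '2019-09-19T06:50:13.000000101Z' };

const rangeFilter = {
  range: {
    '@timestamp': {
      format: 'strict_date_optional_time',
      ...startRange,
      ...endRange,
    },
  },
};

// searchAfter carries epoch nanoseconds as a string (convertIsoToNanosAsStr()),
// so no precision is lost in the numeric sort value
const searchAfter = ['1568789413000000101', 0];

const sort = [{ '@timestamp': 'asc' }, { _doc: 'asc' }];
```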

View file

@ -0,0 +1,72 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import moment from 'moment';
/**
* extract the nanoseconds from an ISO timestamp, if available
* returns the fractional seconds as a string padded to 9 digits, e.g.:
* 9ns -> 000000009
* 10000ns -> 000010000
*/
export function extractNanoSeconds(timeFieldValue: string = ''): string {
const fractionSeconds = timeFieldValue.split('.')[1].replace('Z', '');
return fractionSeconds.length !== 9 ? fractionSeconds.padEnd(9, '0') : fractionSeconds;
}
/**
* convert a given ISO formatted timestamp to epoch time in nanoseconds, returned as a string
*/
export function convertIsoToNanosAsStr(isoValue: string): string {
const nanos = extractNanoSeconds(isoValue);
const millis = convertIsoToMillis(isoValue);
return `${millis}${nanos.substr(3, 6)}`;
}
/**
* convert an iso formatted string to number of milliseconds since
* 1970-01-01T00:00:00.000Z
* @param {string} isoValue
* @returns {number}
*/
export function convertIsoToMillis(isoValue: string): number {
const date = new Date(isoValue);
return date.getTime();
}
/**
* the given time value in milliseconds is converted to an ISO formatted string
* if nanosValue is provided, it replaces the fractional seconds part
* of the formatted string, since moment.js doesn't support formatting timestamps
* with a precision higher than milliseconds
* Note that the browser rounds date_nanos values when parsing them:
* 2019-09-18T06:50:12.999999999 -> browser rounds to 1568789413000000000
* 2019-09-18T06:50:59.999999999 -> browser rounds to 1568789460000000000
* 2017-12-31T23:59:59.999999999 -> browser rounds 1514761199999999999 to 1514761200000000000
*/
export function convertTimeValueToIso(timeValueMillis: number, nanosValue: string): string | null {
if (!timeValueMillis) {
return null;
}
const isoString = moment(timeValueMillis).toISOString();
if (!isoString) {
return null;
} else if (nanosValue !== '') {
return `${isoString.substring(0, isoString.length - 4)}${nanosValue}Z`;
}
return isoString;
}
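A few expected input/output pairs for these helpers (based on the implementation above; 1568789413000 ms corresponds to 2019-09-18T06:50:13.000Z, consistent with the rounding examples in the comment):

```js
extractNanoSeconds('2019-09-18T06:50:13.000000101Z'); // '000000101'
extractNanoSeconds('2019-09-18T06:50:13.000Z');       // '000000000' (padded to 9 digits)

convertIsoToMillis('2019-09-18T06:50:13.000Z');        // 1568789413000

// re-attaches the nanosecond fraction that moment.js would truncate to milliseconds
convertTimeValueToIso(1568789413000, '000000101');     // '2019-09-18T06:50:13.000000101Z'
convertTimeValueToIso(1568789413000, '');              // '2019-09-18T06:50:13.000Z'
```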

View file

@ -30,7 +30,7 @@ import { FAILURE_REASONS, LOADING_STATUS } from './constants';
export function QueryActionsProvider(Private, Promise) {
const fetchAnchor = Private(fetchAnchorProvider);
const { fetchPredecessors, fetchSuccessors } = Private(fetchContextProvider);
const { fetchSurroundingDocs } = Private(fetchContextProvider);
const {
increasePredecessorCount,
increaseSuccessorCount,
@ -92,40 +92,45 @@ export function QueryActionsProvider(Private, Promise) {
);
};
const fetchPredecessorRows = (state) => () => {
const fetchSurroundingRows = (type, state) => {
const {
queryParameters: { indexPatternId, filters, predecessorCount, sort, tieBreakerField },
queryParameters: { indexPatternId, filters, sort, tieBreakerField },
rows: { anchor },
} = state;
const count = type === 'successors'
? state.queryParameters.successorCount
: state.queryParameters.predecessorCount;
if (!tieBreakerField) {
return Promise.reject(setFailedStatus(state)('predecessors', {
return Promise.reject(setFailedStatus(state)(type, {
reason: FAILURE_REASONS.INVALID_TIEBREAKER
}));
}
setLoadingStatus(state)('predecessors');
setLoadingStatus(state)(type);
return Promise.try(() => (
fetchPredecessors(
fetchSurroundingDocs(
type,
indexPatternId,
sort[0],
sort[1],
anchor.fields[sort[0]][0],
anchor.sort[0],
tieBreakerField,
anchor.sort[1],
predecessorCount,
count,
filters
)
))
.then(
(predecessorDocuments) => {
setLoadedStatus(state)('predecessors');
state.rows.predecessors = predecessorDocuments;
return predecessorDocuments;
(documents) => {
setLoadedStatus(state)(type);
state.rows[type] = documents;
return documents;
},
(error) => {
setFailedStatus(state)('predecessors', { error });
setFailedStatus(state)(type, { error });
toastNotifications.addDanger({
title: i18n.translate('kbn.context.unableToLoadDocumentDescription', {
defaultMessage: 'Unable to load documents'
@ -137,53 +142,10 @@ export function QueryActionsProvider(Private, Promise) {
);
};
const fetchSuccessorRows = (state) => () => {
const {
queryParameters: { indexPatternId, filters, sort, successorCount, tieBreakerField },
rows: { anchor },
} = state;
if (!tieBreakerField) {
return Promise.reject(setFailedStatus(state)('successors', {
reason: FAILURE_REASONS.INVALID_TIEBREAKER
}));
}
setLoadingStatus(state)('successors');
return Promise.try(() => (
fetchSuccessors(
indexPatternId,
sort[0],
sort[1],
anchor.sort[0],
tieBreakerField,
anchor.sort[1],
successorCount,
filters
)
))
.then(
(successorDocuments) => {
setLoadedStatus(state)('successors');
state.rows.successors = successorDocuments;
return successorDocuments;
},
(error) => {
setFailedStatus(state)('successors', { error });
toastNotifications.addDanger({
title: 'Unable to load documents',
text: <MarkdownSimple>{error.message}</MarkdownSimple>,
});
throw error;
},
);
};
const fetchContextRows = (state) => () => (
Promise.all([
fetchPredecessorRows(state)(),
fetchSuccessorRows(state)(),
fetchSurroundingRows('predecessors', state),
fetchSurroundingRows('successors', state),
])
);
@ -204,22 +166,22 @@ export function QueryActionsProvider(Private, Promise) {
const fetchGivenPredecessorRows = (state) => (count) => {
setPredecessorCount(state)(count);
return fetchPredecessorRows(state)();
return fetchSurroundingRows('predecessors', state);
};
const fetchGivenSuccessorRows = (state) => (count) => {
setSuccessorCount(state)(count);
return fetchSuccessorRows(state)();
return fetchSurroundingRows('successors', state);
};
const fetchMorePredecessorRows = (state) => () => {
increasePredecessorCount(state)();
return fetchPredecessorRows(state)();
return fetchSurroundingRows('predecessors', state);
};
const fetchMoreSuccessorRows = (state) => () => {
increaseSuccessorCount(state)();
return fetchSuccessorRows(state)();
return fetchSurroundingRows('successors', state);
};
const setAllRows = (state) => (predecessorRows, anchorRow, successorRows) => (
@ -240,8 +202,6 @@ export function QueryActionsProvider(Private, Promise) {
fetchGivenSuccessorRows,
fetchMorePredecessorRows,
fetchMoreSuccessorRows,
fetchPredecessorRows,
fetchSuccessorRows,
setAllRows,
};
}
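For readers tracing the refactor, the unified `fetchSurroundingRows` reads and writes the same pieces of `state` that the old per-direction functions did; roughly (a sketch inferred from the destructuring above, with illustrative values, not an exhaustive definition):

```js
const state = {
  queryParameters: {
    indexPatternId: 'INDEX_PATTERN_ID',
    filters: [],
    sort: ['@timestamp', 'desc'],     // [time field name, sort direction]
    tieBreakerField: '_doc',
    predecessorCount: 5,              // used when type === 'predecessors'
    successorCount: 5,                // used when type === 'successors'
  },
  rows: {
    anchor: {
      fields: { '@timestamp': ['2019-09-18T06:50:13.000000101Z'] }, // ISO value passed on
      sort: [1568789413000, 0],       // [time sort value, tie breaker value]
    },
    predecessors: [],                 // filled by fetchSurroundingRows('predecessors', state)
    successors: [],                   // filled by fetchSurroundingRows('successors', state)
  },
};
```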

View file

@ -21,7 +21,7 @@
class="euiLink"
data-test-subj="docTableRowAction"
ng-href="{{ getContextAppHref() }}"
ng-if="indexPattern.isTimeBased() && !indexPattern.isTimeNanosBased()"
ng-if="indexPattern.isTimeBased()"
i18n-id="kbn.docTable.tableRow.viewSurroundingDocumentsLinkText"
i18n-default-message="View surrounding documents"
></a>

View file

@ -0,0 +1,82 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import expect from '@kbn/expect';
const TEST_INDEX_PATTERN = 'date-nanos';
const TEST_ANCHOR_TYPE = '_doc';
const TEST_DEFAULT_CONTEXT_SIZE = 1;
const TEST_STEP_SIZE = 3;
export default function ({ getService, getPageObjects }) {
const kibanaServer = getService('kibanaServer');
const docTable = getService('docTable');
const PageObjects = getPageObjects(['common', 'context', 'timePicker', 'discover']);
const esArchiver = getService('esArchiver');
describe('context view for date_nanos', () => {
before(async function () {
await esArchiver.loadIfNeeded('date_nanos');
await kibanaServer.uiSettings.replace({ 'defaultIndex': TEST_INDEX_PATTERN });
await kibanaServer.uiSettings.update({
'context:defaultSize': `${TEST_DEFAULT_CONTEXT_SIZE}`,
'context:step': `${TEST_STEP_SIZE}`,
});
});
after(function unloadDateNanos() {
return esArchiver.unload('date_nanos');
});
it('displays predecessors - anchor - successors in the right order', async function () {
await PageObjects.context.navigateTo(TEST_INDEX_PATTERN, TEST_ANCHOR_TYPE, 'AU_x3-TaGFA8no6Qj999Z');
const table = await docTable.getTable();
const rows = await docTable.getBodyRows(table);
const actualRowsText = await Promise.all(rows.map(row => row.getVisibleText()));
const expectedRowsText = [
'Sep 18, 2019 @ 06:50:13.000000000\n-2',
'Sep 18, 2019 @ 06:50:12.999999999\n-3',
'Sep 19, 2015 @ 06:50:13.000100001\n1'
];
expect(actualRowsText).to.eql(expectedRowsText);
});
it('displays correctly when predecessors and successors are loaded', async function () {
await PageObjects.context.navigateTo(TEST_INDEX_PATTERN, TEST_ANCHOR_TYPE, 'AU_x3-TaGFA8no6Qjisd');
await PageObjects.context.clickPredecessorLoadMoreButton();
await PageObjects.context.clickSuccessorLoadMoreButton();
const table = await docTable.getTable();
const rows = await docTable.getBodyRows(table);
const actualRowsText = await Promise.all(rows.map(row => row.getVisibleText()));
const expectedRowsText = [
'Sep 22, 2019 @ 23:50:13.253123345\n5',
'Sep 18, 2019 @ 06:50:13.000000104\n4',
'Sep 18, 2019 @ 06:50:13.000000103\n2',
'Sep 18, 2019 @ 06:50:13.000000102\n1',
'Sep 18, 2019 @ 06:50:13.000000101\n0',
'Sep 18, 2019 @ 06:50:13.000000001\n-1',
'Sep 18, 2019 @ 06:50:13.000000000\n-2',
'Sep 18, 2019 @ 06:50:12.999999999\n-3',
'Sep 19, 2015 @ 06:50:13.000100001\n1'
];
expect(actualRowsText).to.eql(expectedRowsText);
});
});
}

View file

@ -41,6 +41,7 @@ export default function ({ getService, getPageObjects, loadTestFile }) {
loadTestFile(require.resolve('./_discover_navigation'));
loadTestFile(require.resolve('./_filters'));
loadTestFile(require.resolve('./_size'));
loadTestFile(require.resolve('./_date_nanos'));
});
}

View file

@ -23,8 +23,8 @@ export default function ({ getService, getPageObjects }) {
const esArchiver = getService('esArchiver');
const PageObjects = getPageObjects(['common', 'timePicker', 'discover']);
const kibanaServer = getService('kibanaServer');
const fromTime = '2015-09-19 06:31:44.000';
const toTime = '2015-09-23 18:31:44.000';
const fromTime = '2019-09-22 20:31:44.000';
const toTime = '2019-09-23 03:31:44.000';
describe('date_nanos', function () {
@ -41,10 +41,10 @@ export default function ({ getService, getPageObjects }) {
it('should show a timestamp with nanoseconds in the first result row', async function () {
const time = await PageObjects.timePicker.getTimeConfig();
expect(time.start).to.be('Sep 19, 2015 @ 06:31:44.000');
expect(time.end).to.be('Sep 23, 2015 @ 18:31:44.000');
expect(time.start).to.be('Sep 22, 2019 @ 20:31:44.000');
expect(time.end).to.be('Sep 23, 2019 @ 03:31:44.000');
const rowData = await PageObjects.discover.getDocTableIndex(1);
expect(rowData.startsWith('Sep 22, 2015 @ 23:50:13.253123345')).to.be.ok();
expect(rowData.startsWith('Sep 22, 2019 @ 23:50:13.253123345')).to.be.ok();
});
});

View file

@ -48,8 +48,145 @@
"id": "AU_x3-TaGFA8no6QjiSJ",
"index": "date-nanos",
"source": {
"@message" : "1",
"@timestamp": "2015-09-22T23:50:13.253123345Z",
"@message" : "5",
"@timestamp": "2019-09-22T23:50:13.253123345Z",
"referer": "http://twitter.com/error/takuya-onishi",
"request": "/uploads/dafydd-williams.jpg",
"response": "200",
"type": "apache",
"url": "https://media-for-the-masses.theacademyofperformingartsandscience.org/uploads/dafydd-williams.jpg"
}
}
}
{
"type": "doc",
"value": {
"id": "AU_x3-TaGFA8no6Qjisd",
"index": "date-nanos",
"source": {
"@message" : "0",
"@timestamp": "2019-09-18T06:50:13.000000101Z",
"referer": "http://twitter.com/error/takuya-onishi",
"request": "/uploads/dafydd-williams.jpg",
"response": "200",
"type": "apache",
"url": "https://media-for-the-masses.theacademyofperformingartsandscience.org/uploads/dafydd-williams.jpg"
}
}
}
{
"type": "doc",
"value": {
"id": "AU_x3-TaGFA8no6Qji102Z",
"index": "date-nanos",
"source": {
"@message" : "1",
"@timestamp": "2019-09-18T06:50:13.000000102Z",
"referer": "http://twitter.com/error/takuya-onishi",
"request": "/uploads/dafydd-williams.jpg",
"response": "200",
"type": "apache",
"url": "https://media-for-the-masses.theacademyofperformingartsandscience.org/uploads/dafydd-williams.jpg"
}
}
}
{
"type": "doc",
"value": {
"id": "AU_x3-TaGFA8no6Qjis104Z",
"index": "date-nanos",
"source": {
"@message" : "4",
"@timestamp": "2019-09-18T06:50:13.000000104Z",
"referer": "http://twitter.com/error/takuya-onishi",
"request": "/uploads/dafydd-williams.jpg",
"response": "200",
"type": "apache",
"url": "https://media-for-the-masses.theacademyofperformingartsandscience.org/uploads/dafydd-williams.jpg"
}
}
}
{
"type": "doc",
"value": {
"id": "BU_x3-TaGFA8no6Qjis103Z",
"index": "date-nanos",
"source": {
"@message" : "2",
"@timestamp": "2019-09-18T06:50:13.000000103Z",
"referer": "http://twitter.com/error/takuya-onishi",
"request": "/uploads/dafydd-williams.jpg",
"response": "200",
"type": "apache",
"url": "https://media-for-the-masses.theacademyofperformingartsandscience.org/uploads/dafydd-williams.jpg"
}
}
}
{
"type": "doc",
"value": {
"id": "CU_x3-TaGFA8no6QjiSX000Z",
"index": "date-nanos",
"source": {
"@message" : "-2",
"@timestamp": "2019-09-18T06:50:13.000Z",
"referer": "http://twitter.com/error/takuya-onishi",
"request": "/uploads/dafydd-williams.jpg",
"response": "200",
"type": "apache",
"url": "https://media-for-the-masses.theacademyofperformingartsandscience.org/uploads/dafydd-williams.jpg"
}
}
}
{
"type": "doc",
"value": {
"id": "AU_x3-TaGFA8no6Qj999Z",
"index": "date-nanos",
"source": {
"@message" : "-3",
"@timestamp": "2019-09-18T06:50:12.999999999Z",
"referer": "http://twitter.com/error/takuya-onishi",
"request": "/uploads/dafydd-williams.jpg",
"response": "200",
"type": "apache",
"url": "https://media-for-the-masses.theacademyofperformingartsandscience.org/uploads/dafydd-williams.jpg"
}
}
}
{
"type": "doc",
"value": {
"id": "AU_x3-TaGFA8no6Qsd001Z",
"index": "date-nanos",
"source": {
"@message" : "-1",
"@timestamp": "2019-09-18T06:50:13.000000001Z",
"referer": "http://twitter.com/error/takuya-onishi",
"request": "/uploads/dafydd-williams.jpg",
"response": "200",
"type": "apache",
"url": "https://media-for-the-masses.theacademyofperformingartsandscience.org/uploads/dafydd-williams.jpg"
}
}
}
{
"type": "doc",
"value": {
"id": "AU_x3-TaGFA8no000100001Z",
"index": "date-nanos",
"source": {
"@message" : "1",
"@timestamp": "2015-09-19T06:50:13.000100001Z",
"referer": "http://twitter.com/error/takuya-onishi",
"request": "/uploads/dafydd-williams.jpg",
"response": "200",