[Courier] Remove unused merge duplicate request logic (#33907)

* Remove unused courier logic to merge duplicate requests

* Remove unused courier logic to merge duplicate requests

* Remove accidental console log

* Revert change to filter aborted statuses

* Revert other change

* Revert other change

* Remove space

* Fix test

* Move ngMock back to where it belongs
This commit is contained in:
Lukas Olson 2019-03-28 13:49:50 -07:00 committed by GitHub
parent 283fd2e4a6
commit 518f10d4e1
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
5 changed files with 10 additions and 82 deletions

View file

@@ -26,7 +26,6 @@ import { delay } from 'bluebird';
import { CallClientProvider } from '../call_client';
import { RequestStatus } from '../req_status';
import { SearchRequestProvider } from '../request';
import { MergeDuplicatesRequestProvider } from '../merge_duplicate_requests';
import { addSearchStrategy } from '../../search_strategy';
describe('callClient', () => {
@@ -58,17 +57,7 @@ describe('callClient', () => {
return searchRequest;
};
beforeEach(ngMock.module('kibana', PrivateProvider => {
// We mock this so that we don't need to stub out methods for searchRequest.source, e.g. getId(),
// which is used by mergeDuplicateRequests.
function FakeMergeDuplicatesRequestProvider() {
return function mergeDuplicateRequests(searchRequests) {
return searchRequests;
};
}
PrivateProvider.swap(MergeDuplicatesRequestProvider, FakeMergeDuplicatesRequestProvider);
}));
beforeEach(ngMock.module('kibana'));
beforeEach(ngMock.module(function stubEs($provide) {
esRequestDelay = 0;

View file

@@ -20,7 +20,6 @@
import { ErrorAllowExplicitIndexProvider } from '../../error_allow_explicit_index';
import { assignSearchRequestsToSearchStrategies } from '../search_strategy';
import { IsRequestProvider } from './is_request';
import { MergeDuplicatesRequestProvider } from './merge_duplicate_requests';
import { RequestStatus } from './req_status';
import { SerializeFetchParamsProvider } from './request/serialize_fetch_params';
import { i18n } from '@kbn/i18n';
@@ -28,21 +27,16 @@ import { i18n } from '@kbn/i18n';
export function CallClientProvider(Private, Promise, es, config) {
const errorAllowExplicitIndex = Private(ErrorAllowExplicitIndexProvider);
const isRequest = Private(IsRequestProvider);
const mergeDuplicateRequests = Private(MergeDuplicatesRequestProvider);
const serializeFetchParams = Private(SerializeFetchParamsProvider);
const ABORTED = RequestStatus.ABORTED;
const DUPLICATE = RequestStatus.DUPLICATE;
function callClient(searchRequests) {
const maxConcurrentShardRequests = config.get('courier:maxConcurrentShardRequests');
const includeFrozen = config.get('search:includeFrozen');
// merging docs can change status to DUPLICATE, capture new statuses
const searchRequestsAndStatuses = mergeDuplicateRequests(searchRequests);
// get the actual list of requests that we will be fetching
const requestsToFetch = searchRequestsAndStatuses.filter(isRequest);
const requestsToFetch = searchRequests.filter(isRequest);
let requestsToFetchCount = requestsToFetch.length;
if (requestsToFetchCount === 0) {
@@ -62,23 +56,19 @@ export function CallClientProvider(Private, Promise, es, config) {
// Respond to each searchRequest with the response or ABORTED.
const respondToSearchRequests = (responsesInOriginalRequestOrder = []) => {
// We map over searchRequestsAndStatuses because if we were originally provided an ABORTED
// We map over searchRequests because if we were originally provided an ABORTED
// request then we'll return that value.
return Promise.map(searchRequestsAndStatuses, function (searchRequest, searchRequestIndex) {
return Promise.map(searchRequests, function (searchRequest, searchRequestIndex) {
if (searchRequest.aborted) {
return ABORTED;
}
const status = searchRequestsAndStatuses[searchRequestIndex];
const status = searchRequests[searchRequestIndex];
if (status === ABORTED) {
return ABORTED;
}
if (status === DUPLICATE) {
return searchRequest._uniq.resp;
}
const activeSearchRequestIndex = activeSearchRequests.indexOf(searchRequest);
const isFailedSearchRequest = activeSearchRequestIndex === -1;
@@ -96,7 +86,7 @@ export function CallClientProvider(Private, Promise, es, config) {
// handle a request being aborted while being fetched
const requestWasAborted = Promise.method(function (searchRequest, index) {
if (searchRequestsAndStatuses[index] === ABORTED) {
if (searchRequests[index] === ABORTED) {
defer.reject(new Error(
i18n.translate('common.ui.courier.fetch.requestWasAbortedTwiceErrorMessage', {
defaultMessage: 'Request was aborted twice?',
@@ -121,7 +111,7 @@ export function CallClientProvider(Private, Promise, es, config) {
});
// attach abort handlers, close over request index
searchRequestsAndStatuses.forEach(function (searchRequest, index) {
searchRequests.forEach(function (searchRequest, index) {
if (!isRequest(searchRequest)) {
return;
}
@@ -178,11 +168,11 @@ export function CallClientProvider(Private, Promise, es, config) {
// Assigning searchRequests to strategies means that the responses come back in a different
// order than the original searchRequests. So we'll put them back in order so that we can
// use the order to associate each response with the original request.
const responsesInOriginalRequestOrder = new Array(searchRequestsAndStatuses.length);
const responsesInOriginalRequestOrder = new Array(searchRequests.length);
segregatedResponses.forEach((responses, strategyIndex) => {
responses.forEach((response, responseIndex) => {
const searchRequest = searchStrategiesWithRequests[strategyIndex].searchRequests[responseIndex];
const requestIndex = searchRequestsAndStatuses.indexOf(searchRequest);
const requestIndex = searchRequests.indexOf(searchRequest);
responsesInOriginalRequestOrder[requestIndex] = response;
});
});
@@ -204,7 +194,7 @@ export function CallClientProvider(Private, Promise, es, config) {
// By returning the return value of this catch() without rethrowing the error, we delegate
// error-handling to the searchRequest instead of the consumer.
searchRequests.forEach((searchRequest, index) => {
if (searchRequestsAndStatuses[index] !== ABORTED) {
if (searchRequests[index] !== ABORTED) {
searchRequest.handleFailure(err);
}
});

View file

@@ -42,7 +42,6 @@ export function FetchNowProvider(Private, Promise) {
const continueIncomplete = Private(ContinueIncompleteProvider);
const ABORTED = RequestStatus.ABORTED;
const DUPLICATE = RequestStatus.DUPLICATE;
const INCOMPLETE = RequestStatus.INCOMPLETE;
function fetchNow(searchRequests) {
@@ -94,7 +93,6 @@ export function FetchNowProvider(Private, Promise) {
switch (resp) {
case ABORTED:
return null;
case DUPLICATE:
case INCOMPLETE:
throw new Error(
i18n.translate('common.ui.courier.fetch.failedToClearRequestErrorMessage', {

View file

@@ -1,48 +0,0 @@
/*
* Licensed to Elasticsearch B.V. under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch B.V. licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import { IsRequestProvider } from './is_request';
import { RequestStatus } from './req_status';
/**
 * Angular-style provider for the request de-duplication step of a courier
 * fetch. Requests that share a search source id are collapsed: the first
 * occurrence is kept, and each later occurrence is replaced by the
 * DUPLICATE status marker while remembering (via `_uniq`) which kept
 * request its response should be copied from.
 */
export function MergeDuplicatesRequestProvider(Private) {
  const isRequest = Private(IsRequestProvider);
  const DUPLICATE = RequestStatus.DUPLICATE;

  /**
   * @param {Array} requests - mixed list of search requests and status markers
   * @returns {Array} same-length list where repeat requests for an
   *   already-seen source are replaced with DUPLICATE
   */
  return function mergeDuplicateRequests(requests) {
    // First request seen for each search source id, keyed by that id.
    const firstRequestBySourceId = {};

    return requests.map((request) => {
      // Pass non-request entries (e.g. existing status markers) through untouched.
      if (!isRequest(request)) {
        return request;
      }

      const sourceId = request.source.getId();
      const firstRequest = firstRequestBySourceId[sourceId];

      if (!firstRequest) {
        // First time we've seen this source — keep the request as-is.
        firstRequestBySourceId[sourceId] = request;
        return request;
      }

      // Repeat of an already-kept source: link back to the kept request so
      // its response can be shared, and report this slot as a duplicate.
      request._uniq = firstRequest;
      return DUPLICATE;
    });
  };
}

View file

@@ -19,6 +19,5 @@
export const RequestStatus = {
ABORTED: 'aborted',
DUPLICATE: 'duplicate',
INCOMPLETE: 'incomplete',
};